author    Dana Robinson <derobins@hdfgroup.org>    2016-03-06 17:49:30 (GMT)
committer Dana Robinson <derobins@hdfgroup.org>    2016-03-06 17:49:30 (GMT)
commit    0a57cb562c116683529013c158f6873ebd5b9c34 (patch)
tree      9da832cf2a41c7322dd259ffe717515e6ff0d663 /java/examples/datasets
parent    c95332e8aeacc5551d3c9541256afd98d89b01d7 (diff)
[svn-r29291] Merge of r29226-29259 from trunk.
Mostly java JNI + a few smaller merges.
Tested on: h5committest.new jam w/ java
Diffstat (limited to 'java/examples/datasets')
-rw-r--r--  java/examples/datasets/CMakeLists.txt             138
-rw-r--r--  java/examples/datasets/H5Ex_D_Alloc.java          301
-rw-r--r--  java/examples/datasets/H5Ex_D_Checksum.java       347
-rw-r--r--  java/examples/datasets/H5Ex_D_Chunk.java          366
-rw-r--r--  java/examples/datasets/H5Ex_D_Compact.java        289
-rw-r--r--  java/examples/datasets/H5Ex_D_External.java       238
-rw-r--r--  java/examples/datasets/H5Ex_D_FillValue.java      246
-rw-r--r--  java/examples/datasets/H5Ex_D_Gzip.java           336
-rw-r--r--  java/examples/datasets/H5Ex_D_Hyperslab.java      269
-rw-r--r--  java/examples/datasets/H5Ex_D_Nbit.java           305
-rw-r--r--  java/examples/datasets/H5Ex_D_ReadWrite.java      179
-rw-r--r--  java/examples/datasets/H5Ex_D_Shuffle.java        373
-rw-r--r--  java/examples/datasets/H5Ex_D_Sofloat.java        356
-rw-r--r--  java/examples/datasets/H5Ex_D_Soint.java          335
-rw-r--r--  java/examples/datasets/H5Ex_D_Szip.java           337
-rw-r--r--  java/examples/datasets/H5Ex_D_Transform.java      250
-rw-r--r--  java/examples/datasets/H5Ex_D_UnlimitedAdd.java   393
-rw-r--r--  java/examples/datasets/H5Ex_D_UnlimitedGzip.java  504
-rw-r--r--  java/examples/datasets/H5Ex_D_UnlimitedMod.java   379
-rw-r--r--  java/examples/datasets/Makefile.am                 78
-rw-r--r--  java/examples/datasets/runExample.sh.in           413
21 files changed, 6432 insertions, 0 deletions
diff --git a/java/examples/datasets/CMakeLists.txt b/java/examples/datasets/CMakeLists.txt
new file mode 100644
index 0000000..077c6bb
--- /dev/null
+++ b/java/examples/datasets/CMakeLists.txt
@@ -0,0 +1,138 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDFJAVA_EXAMPLES_DATASETS Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDF5_JAVA_JNI_BINARY_DIR}
+ ${HDF5_JAVA_HDF5_LIB_DIR}
+)
+
+set (HDF_JAVA_EXAMPLES
+ H5Ex_D_Alloc
+ H5Ex_D_Checksum
+ H5Ex_D_Chunk
+ H5Ex_D_Compact
+ H5Ex_D_External
+ H5Ex_D_FillValue
+ H5Ex_D_Gzip
+ H5Ex_D_Hyperslab
+ H5Ex_D_ReadWrite
+ H5Ex_D_Shuffle
+ H5Ex_D_Szip
+ H5Ex_D_UnlimitedAdd
+ H5Ex_D_UnlimitedGzip
+ H5Ex_D_UnlimitedMod
+ H5Ex_D_Nbit
+ H5Ex_D_Transform
+ H5Ex_D_Sofloat
+ H5Ex_D_Soint
+)
+
+if (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+else (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+endif (WIN32)
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS}")
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
+endforeach (CMAKE_INCLUDE_PATH)
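+# The loop above joins "." and each include path with the platform separator
+# chosen earlier (";" on Windows, ":" elsewhere), yielding a result such as
+# ".:/path/to/jarhdf5.jar" (path hypothetical).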
+
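+# Build one jar per example; each jar's manifest records the example's
+# main class.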
+foreach (example ${HDF_JAVA_EXAMPLES})
+ file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt
+ "Main-Class: examples.datasets.${example}
+"
+ )
+ add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java)
+ get_target_property (${example}_JAR_FILE ${example} JAR_FILE)
+# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
+ get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
+ add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
+endforeach (example ${HDF_JAVA_EXAMPLES})
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
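+# Running the examples also needs the logging jars listed above; judging by
+# the HDF5_JAVA_LOGGING_* variable names, these are a logging API jar plus a
+# no-op binding used by the hdf.hdf5lib wrapper (an assumption, not verified
+# here).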
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (HDFJAVA_JAR ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${HDFJAVA_JAR}")
+endforeach (HDFJAVA_JAR)
+
+MACRO (ADD_H5_TEST resultfile resultcode)
+ add_test (
+ NAME JAVA_datasets-${resultfile}
+ COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
+ -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${resultfile}_JAR_FILE}"
+ -D "TEST_ARGS:STRING=${ARGN}"
+ -D "TEST_PROGRAM=examples.datasets.${resultfile}"
+ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}"
+ -D "TEST_FOLDER=${HDFJAVA_EXAMPLES_BINARY_DIR}"
+ -D "TEST_OUTPUT=datasets/${resultfile}.out"
+ -D "TEST_EXPECT=${resultcode}"
+ -D "TEST_REFERENCE=datasets/${resultfile}.txt"
+ -P "${HDF_RESOURCES_DIR}/jrunTest.cmake"
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (JAVA_datasets-${resultfile} PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "JAVA_datasets-${resultfile}")
+ENDMACRO (ADD_H5_TEST)
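+# Usage sketch (mirroring the invocations further below): the first argument
+# is the example class name and the second is the expected exit code, e.g.
+#   ADD_H5_TEST (H5Ex_D_ReadWrite 0)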
+
+if (BUILD_TESTING)
+
+# Detect whether the deflate and szip filters are available.
+ if (H5_HAVE_FILTER_DEFLATE)
+ set (USE_FILTER_DEFLATE "true")
+ endif (H5_HAVE_FILTER_DEFLATE)
+
+ if (H5_HAVE_FILTER_SZIP)
+ set (USE_FILTER_SZIP "true")
+ endif (H5_HAVE_FILTER_SZIP)
+
+ foreach (example ${HDF_JAVA_EXAMPLES})
+ if (${example} STREQUAL "H5Ex_D_External")
+ add_test (
+ NAME JAVA_datasets-${example}-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
+ ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.data
+ ${example}.out
+ ${example}.out.err
+ )
+ else (${example} STREQUAL "H5Ex_D_External")
+ add_test (
+ NAME JAVA_datasets-${example}-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
+ ${example}.out
+ ${example}.out.err
+ )
+ endif (${example} STREQUAL "H5Ex_D_External")
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (JAVA_datasets-${example}-clearall-objects PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ add_test (
+ NAME JAVA_datasets-${example}-copy-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E copy_if_different
+ ${HDFJAVA_EXAMPLES_SOURCE_DIR}/testfiles/examples.datasets.${example}.txt
+ ${HDFJAVA_EXAMPLES_DATASETS_BINARY_DIR}/${example}.txt
+ )
+ set_tests_properties (JAVA_datasets-${example}-copy-objects PROPERTIES DEPENDS JAVA_datasets-${example}-clearall-objects)
+ set (last_test "JAVA_datasets-${example}-copy-objects")
+ if (${example} STREQUAL "H5Ex_D_Szip")
+ if (USE_FILTER_SZIP)
+ ADD_H5_TEST (${example} 0)
+ endif (USE_FILTER_SZIP)
+ else (${example} STREQUAL "H5Ex_D_Szip")
+ ADD_H5_TEST (${example} 0)
+ endif (${example} STREQUAL "H5Ex_D_Szip")
+
+ endforeach (example ${HDF_JAVA_EXAMPLES})
+endif (BUILD_TESTING)
diff --git a/java/examples/datasets/H5Ex_D_Alloc.java b/java/examples/datasets/H5Ex_D_Alloc.java
new file mode 100644
index 0000000..69fee38
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Alloc.java
@@ -0,0 +1,301 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to set the space allocation time
+ for a dataset. The program first creates two datasets,
+ one with the default allocation time (late) and one with
+ early allocation time, and displays whether each has been
+ allocated and their allocation size. Next, it writes data
+ to the datasets, and again displays whether each has been
+ allocated and their allocation size.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Alloc {
+ private static String FILENAME = "H5Ex_D_Alloc.h5";
+ private static String DATASETNAME1 = "DS1";
+ private static String DATASETNAME2 = "DS2";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int FILLVAL = 99;
+ private static final int RANK = 2;
+
+ // Values for the status of space allocation
+ enum H5D_space_status {
+ H5D_SPACE_STATUS_ERROR(-1), H5D_SPACE_STATUS_NOT_ALLOCATED(0),
+ H5D_SPACE_STATUS_PART_ALLOCATED(1), H5D_SPACE_STATUS_ALLOCATED(2);
+ private static final Map<Integer, H5D_space_status> lookup = new HashMap<Integer, H5D_space_status>();
+
+ static {
+ for (H5D_space_status s : EnumSet.allOf(H5D_space_status.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5D_space_status(int space_status) {
+ this.code = space_status;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5D_space_status get(int code) {
+ return lookup.get(code);
+ }
+ }
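+ // The enum above mirrors the C H5D_space_status_t values, so the int
+ // returned by H5.H5Dget_space_status() can be mapped back to a named
+ // constant via H5D_space_status.get(), as done below.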
+
+ private static void allocation() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id1 = -1;
+ long dataset_id2 = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+ int space_status = 0;
+ long storage_size = 0;
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = FILLVAL;
+
+ // Create a file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, and set the chunk size.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the allocation time to "early". This way we can be sure
+ // that reading from the dataset immediately after creation will
+ // return the fill value.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ System.out.println("Creating datasets...");
+ System.out.println(DATASETNAME1 + " has allocation time H5D_ALLOC_TIME_LATE");
+ System.out.println(DATASETNAME2 + " has allocation time H5D_ALLOC_TIME_EARLY");
+ System.out.println();
+
+ // Create the first dataset using the default dataset creation property list.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id1 = H5.H5Dcreate(file_id, DATASETNAME1, HDF5Constants.H5T_NATIVE_INT, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the second dataset using the dataset creation property list defined above.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id2 = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_NATIVE_INT, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print space status and storage size for dset1.
+ try {
+ if (dataset_id1 >= 0)
+ space_status = H5.H5Dget_space_status(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id1 >= 0)
+ storage_size = H5.H5Dget_storage_size(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ String the_space = " ";
+ if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+ the_space += "not ";
+ System.out.println("Space for " + DATASETNAME1 + " has" + the_space + "been allocated.");
+ System.out.println("Storage size for " + DATASETNAME1 + " is: " + storage_size + " bytes.");
+
+ // Retrieve and print space status and storage size for dset2.
+ try {
+ if (dataset_id2 >= 0)
+ space_status = H5.H5Dget_space_status(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id2 >= 0)
+ storage_size = H5.H5Dget_storage_size(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ the_space = " ";
+ if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+ the_space += "not ";
+ System.out.println("Space for " + DATASETNAME2 + " has" + the_space + "been allocated.");
+ System.out.println("Storage size for " + DATASETNAME2 + " is: " + storage_size + " bytes.");
+ System.out.println();
+
+ System.out.println("Writing data...");
+ System.out.println();
+
+ // Write the data to the datasets.
+ try {
+ if (dataset_id1 >= 0)
+ H5.H5Dwrite(dataset_id1, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data[0]);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id2 >= 0)
+ H5.H5Dwrite(dataset_id2, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data[0]);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print space status and storage size for dset1.
+ try {
+ if (dataset_id1 >= 0)
+ space_status = H5.H5Dget_space_status(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id1 >= 0)
+ storage_size = H5.H5Dget_storage_size(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ the_space = " ";
+ if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+ the_space += "not ";
+ System.out.println("Space for " + DATASETNAME1 + " has" + the_space + "been allocated.");
+ System.out.println("Storage size for " + DATASETNAME1 + " is: " + storage_size + " bytes.");
+
+ // Retrieve and print space status and storage size for dset2.
+ try {
+ if (dataset_id2 >= 0)
+ space_status = H5.H5Dget_space_status(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id2 >= 0)
+ storage_size = H5.H5Dget_storage_size(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ the_space = " ";
+ if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+ the_space += "not ";
+ System.out.println("Space for " + DATASETNAME2 + " has" + the_space + "been allocated.");
+ System.out.println("Storage size for " + DATASETNAME2 + " is: " + storage_size + " bytes.");
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id1 >= 0)
+ H5.H5Dclose(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id2 >= 0)
+ H5.H5Dclose(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_Alloc.allocation();
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_Checksum.java b/java/examples/datasets/H5Ex_D_Checksum.java
new file mode 100644
index 0000000..3a2f98f
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Checksum.java
@@ -0,0 +1,347 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the Fletcher32 checksum filter. The program first
+ checks if the Fletcher32 filter is available, then if it
+ is it writes integers to a dataset using Fletcher32, then
+ closes the file. Next, it reopens the file, reads back
+ the data, checks if the filter detected an error and
+ outputs the type of filter and the maximum value in the
+ dataset to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Checksum {
+ private static String FILENAME = "H5Ex_D_Checksum.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Values of the HDF5 filter identifiers
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(-1), H5Z_FILTER_NONE(0), H5Z_FILTER_DEFLATE(1), H5Z_FILTER_SHUFFLE(2),
+ H5Z_FILTER_FLETCHER32(3), H5Z_FILTER_SZIP(4), H5Z_FILTER_NBIT(5), H5Z_FILTER_SCALEOFFSET(6),
+ H5Z_FILTER_RESERVED(256), H5Z_FILTER_MAX(65535);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkFletcher32Filter() {
+ try {
+ int available = H5.H5Zfilter_avail(H5Z_filter.H5Z_FILTER_FLETCHER32.getCode());
+ if (available == 0) {
+ System.out.println("N-Bit filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_FLETCHER32);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("N-Bit filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeChecksum() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the Fletcher32 filter.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
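+ // Fletcher32 stores a checksum with each chunk of raw data and
+ // verifies it when the chunk is read back.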
+ H5.H5Pset_fletcher32(dcpl_id);
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readChecksum() {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
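+ // The sixth argument (120) is the maximum number of characters of
+ // the filter name to copy into filter_name.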
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120,
+ filter_name, filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ int status = H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ // Check if the read was successful. Normally we do not perform
+ // error checking in these examples for the sake of clarity, but in
+ // this case we will make an exception because this is how the
+ // fletcher32 checksum filter reports data errors.
+ if (status < 0) {
+ System.out.print("Dataset read failed!");
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return;
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ // Check if the Fletcher32 filter is available and can be used for
+ // both encoding and decoding. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+ // optional part of the HDF5 library.
+ if (H5Ex_D_Checksum.checkFletcher32Filter()) {
+ H5Ex_D_Checksum.writeChecksum();
+ H5Ex_D_Checksum.readChecksum();
+ }
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_Chunk.java b/java/examples/datasets/H5Ex_D_Chunk.java
new file mode 100644
index 0000000..7f02e5a
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Chunk.java
@@ -0,0 +1,366 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create a chunked dataset. The
+ program first writes integers in a hyperslab selection to
+ a chunked dataset with dataspace dimensions of DIM_XxDIM_Y
+ and chunk size of CHUNK_XxCHUNK_Y, then closes the file.
+ Next, it reopens the file, reads back the data, and
+ outputs it to the screen. Finally it reads the data again
+ using a different hyperslab selection, and outputs
+ the result to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Chunk {
+ private static String FILENAME = "H5Ex_D_Chunk.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 6;
+ private static final int DIM_Y = 8;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Values of the dataset storage layout
+ enum H5D_layout {
+ H5D_LAYOUT_ERROR(-1), H5D_COMPACT(0), H5D_CONTIGUOUS(1), H5D_CHUNKED(2), H5D_NLAYOUTS(3);
+ private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();
+
+ static {
+ for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5D_layout(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5D_layout get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static void writeChunk() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data to "1", to make it easier to see the selections.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = 1;
+
+ // Print the data to the screen.
+ System.out.println("Original Data:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the chunk size.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the chunked dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Define and select the first part of the hyperslab selection.
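+ // start/stride/count/block below select 2x2 blocks of elements,
+ // repeated every 3 elements, 2 blocks down and 3 across; the second
+ // selection further down removes one element from each block.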
+ long[] start = { 0, 0 };
+ long[] stride = { 3, 3 };
+ long[] count = { 2, 3 };
+ long[] block = { 2, 2 };
+ try {
+ if ((filespace_id >= 0))
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Define and select the second part of the hyperslab selection,
+ // which is subtracted from the first selection by the use of
+ // H5S_SELECT_NOTB
+ block[0] = 1;
+ block[1] = 1;
+ try {
+ if ((filespace_id >= 0)) {
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count, block);
+
+ // Write the data to the dataset.
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readChunk() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Print the storage layout.
+ try {
+ if (dcpl_id >= 0) {
+ int layout_type = H5.H5Pget_layout(dcpl_id);
+ System.out.print("Storage layout for " + DATASETNAME + " is: ");
+ switch (H5D_layout.get(layout_type)) {
+ case H5D_COMPACT:
+ System.out.println("H5D_COMPACT");
+ break;
+ case H5D_CONTIGUOUS:
+ System.out.println("H5D_CONTIGUOUS");
+ break;
+ case H5D_CHUNKED:
+ System.out.println("H5D_CHUNKED");
+ break;
+ case H5D_LAYOUT_ERROR:
+ break;
+ case H5D_NLAYOUTS:
+ break;
+ default:
+ break;
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as written to disk by hyberslabs:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Initialize the read array.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = 0;
+
+ // Define and select the hyperslab to use for reading.
+ try {
+ if (dataset_id >= 0) {
+ filespace_id = H5.H5Dget_space(dataset_id);
+
+ long[] start = { 0, 1 };
+ long[] stride = { 4, 4 };
+ long[] count = { 2, 2 };
+ long[] block = { 2, 3 };
+
+ if (filespace_id >= 0) {
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+
+ // Read the data using the previously defined hyperslab.
+ if ((dataset_id >= 0) && (filespace_id >= 0))
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as read from disk by hyberslab:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_Chunk.writeChunk();
+ H5Ex_D_Chunk.readChunk();
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_Compact.java b/java/examples/datasets/H5Ex_D_Compact.java
new file mode 100644
index 0000000..4f1e2f0
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Compact.java
@@ -0,0 +1,289 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a compact
+ dataset. The program first writes integers to a compact
+ dataset with dataspace dimensions of DIM_XxDIM_Y, then
+ closes the file. Next, it reopens the file, reads back
+ the data, and outputs it to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Compact {
+ private static String FILENAME = "H5Ex_D_Compact.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int RANK = 2;
+
+ // Values of the dataset storage layout
+ enum H5D_layout {
+ H5D_LAYOUT_ERROR(-1), H5D_COMPACT(0), H5D_CONTIGUOUS(1), H5D_CHUNKED(2), H5D_NLAYOUTS(3);
+ private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();
+
+ static {
+ for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5D_layout(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5D_layout get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static void writeCompact() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the layout to compact.
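+ // A compact dataset's raw data is stored in the object header, so it is
+ // limited to about 64 KiB; this 4x7 dataset of ints fits easily.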
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_layout(dcpl_id, H5D_layout.H5D_COMPACT.getCode());
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset using the dataset creation property list with the compact layout.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readCompact() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open file and dataset using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Print the storage layout.
+ try {
+ if (dcpl_id >= 0) {
+ int layout_type = H5.H5Pget_layout(dcpl_id);
+ System.out.print("Storage layout for " + DATASETNAME + " is: ");
+ switch (H5D_layout.get(layout_type)) {
+ case H5D_COMPACT:
+ System.out.println("H5D_COMPACT");
+ break;
+ case H5D_CONTIGUOUS:
+ System.out.println("H5D_CONTIGUOUS");
+ break;
+ case H5D_CHUNKED:
+ System.out.println("H5D_CHUNKED");
+ break;
+ case H5D_LAYOUT_ERROR:
+ break;
+ case H5D_NLAYOUTS:
+ break;
+ default:
+ break;
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data for " + DATASETNAME + " is: ");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_Compact.writeCompact();
+ H5Ex_D_Compact.readCompact();
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_External.java b/java/examples/datasets/H5Ex_D_External.java
new file mode 100644
index 0000000..5fdc696
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_External.java
@@ -0,0 +1,238 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to an
+ external dataset. The program first writes integers to an
+ external dataset with dataspace dimensions of DIM_XxDIM_Y,
+ then closes the file. Next, it reopens the file, reads
+ back the data, and outputs the name of the external data
+ file and the data to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_External {
+ private static String FILENAME = "H5Ex_D_External.h5";
+ private static String EXTERNALNAME = "H5Ex_D_External.data";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int RANK = 2;
+ private static final int NAME_BUF_SIZE = 32;
+
+ private static void writeExternal() {
+ long file_id = -1;
+ long dcpl_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the external file; with offset 0 and an unlimited size, it will hold the entire dataset.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_external(dcpl_id, EXTERNALNAME, 0, HDF5Constants.H5F_UNLIMITED);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void readExternal() {
+ long file_id = -1;
+ long dcpl_id = -1;
+ long dataset_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+ String[] Xname = new String[1];
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the name of the external file.
+ long[] Xsize = new long[NAME_BUF_SIZE];
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pget_external(dcpl_id, 0, Xsize.length, Xname, Xsize);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ System.out.println(DATASETNAME + " is stored in file: " + Xname[0]);
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println(DATASETNAME + ":");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_External.writeExternal();
+ H5Ex_D_External.readExternal();
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_FillValue.java b/java/examples/datasets/H5Ex_D_FillValue.java
new file mode 100644
index 0000000..982d2cb
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_FillValue.java
@@ -0,0 +1,246 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to set the fill value for a
+ dataset. The program first sets the fill value to
+ FILLVAL, creates a dataset with dimensions of DIM_XxDIM_Y,
+ reads from the uninitialized dataset, and outputs the
+ contents to the screen. Next, it writes integers to the
+ dataset, reads the data back, and outputs it to the
+ screen. Finally it extends the dataset, reads from it,
+ and outputs the result to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_FillValue {
+ private static String FILENAME = "H5Ex_D_FillValue.h5";
+ private static String DATASETNAME = "ExtendibleArray";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+ private static final int FILLVAL = 99;
+
+ private static void fillValue() {
+ long file_id = -1;
+ long dcpl_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] extdims = { EDIM_X, EDIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+ int[][] write_dset_data = new int[DIM_X][DIM_Y];
+ int[][] read_dset_data = new int[DIM_X][DIM_Y];
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ write_dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace with unlimited dimensions.
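+ // Unlimited dimensions require a chunked layout, which is why a chunk
+ // size is set on the dataset creation property list below.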
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the chunk size.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the fill value for the dataset
+ try {
+ int[] fill_value = { FILLVAL };
+ if (dcpl_id >= 0)
+ H5.H5Pset_fill_value(dcpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the allocation time to "early". This way we can be sure
+ // that reading from the dataset immediately after creation will
+ // return the fill value.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset using the dataset creation property list.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read values from the dataset, which has not been written to yet.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, read_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset before being written to:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(read_dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, write_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data back.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, read_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after being written to:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(read_dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Extend the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dset_extent(dataset_id, extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read from the extended dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after extension:");
+ for (int indx = 0; indx < EDIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < EDIM_Y; jndx++)
+ System.out.print(extend_dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_FillValue.fillValue();
+ }
+
+}
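
The two property-list calls above work as a unit: H5Pset_fill_value defines the value, and H5D_ALLOC_TIME_EARLY forces storage to be allocated and filled at H5Dcreate time, so the read before the first write returns FILLVAL. Below is a minimal sketch packaging them as a helper; it uses only calls that appear in the example, and the class and method names are illustrative.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class FillValueDcpl {
        // Build a dataset creation property list whose unwritten integer
        // elements read back as fillValue, even before the first H5Dwrite.
        public static long createIntFillDcpl(int fillValue) throws Exception {
            long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            // The fill value is passed as a one-element array of the
            // dataset's in-memory type, as in the example above.
            H5.H5Pset_fill_value(dcpl_id, HDF5Constants.H5T_NATIVE_INT, new int[] { fillValue });
            // Allocate (and fill) storage at H5Dcreate time.
            H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
            return dcpl_id; // caller is responsible for H5.H5Pclose(dcpl_id)
        }
    }
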
diff --git a/java/examples/datasets/H5Ex_D_Gzip.java b/java/examples/datasets/H5Ex_D_Gzip.java
new file mode 100644
index 0000000..b813367
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Gzip.java
@@ -0,0 +1,336 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using gzip compression (also called zlib or deflate). The
+ program first checks if gzip compression is available,
+ then, if it is, writes integers to a dataset using gzip,
+ then closes the file. Next, it reopens the file, reads
+ back the data, and outputs the type of compression and the
+ maximum value in the dataset to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Gzip {
+ private static String FILENAME = "H5Ex_D_Gzip.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Filter type codes wrapped in an enum, so the identifier returned
+ // by H5Pget_filter can drive a readable switch statement.
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int filter_code) {
+ this.code = filter_code;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkGzipFilter() {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (available == 0) {
+ System.out.println("gzip filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("gzip filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeGzip() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the gzip compression
+ // filter.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_deflate(dcpl_id, 9);
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readGzip() {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ // Check if gzip compression is available and can be used for both
+ // compression and decompression. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+ // optional part of the HDF5 library.
+ if (H5Ex_D_Gzip.checkGzipFilter()) {
+ H5Ex_D_Gzip.writeGzip();
+ H5Ex_D_Gzip.readGzip();
+ }
+ }
+
+}
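
checkGzipFilter() makes two independent tests: H5Zfilter_avail asks whether the filter is registered in this build at all, and H5Zget_filter_info asks whether both the encoder and the decoder are enabled. The same pair of calls works unchanged for any filter identifier; here is a sketch of the generalized check (the class name is illustrative).

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class FilterCheck {
        // Returns true only if the given filter can both compress on write
        // and decompress on read in this HDF5 build.
        public static boolean canEncodeAndDecode(int filterId) {
            try {
                if (H5.H5Zfilter_avail(filterId) == 0)
                    return false; // filter not registered in this build
                int info = H5.H5Zget_filter_info(filterId);
                return (info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) != 0
                        && (info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) != 0;
            }
            catch (Exception e) {
                e.printStackTrace();
                return false;
            }
        }

        public static void main(String[] args) {
            System.out.println("deflate usable: "
                    + canEncodeAndDecode(HDF5Constants.H5Z_FILTER_DEFLATE));
        }
    }
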
diff --git a/java/examples/datasets/H5Ex_D_Hyperslab.java b/java/examples/datasets/H5Ex_D_Hyperslab.java
new file mode 100644
index 0000000..482e2c0
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Hyperslab.java
@@ -0,0 +1,269 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a
+ dataset by hyperslabs. The program first writes integers
+ in a hyperslab selection to a dataset with dataspace
+ dimensions of DIM_XxDIM_Y, then closes the file. Next, it
+ reopens the file, reads back the data, and outputs it to
+ the screen. Finally it reads the data again using a
+ different hyperslab selection, and outputs the result to
+ the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Hyperslab {
+ private static String FILENAME = "H5Ex_D_Hyperslab.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 6;
+ private static final int DIM_Y = 8;
+ private static final int RANK = 2;
+
+ private static void writeHyperslab() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data to "1", to make it easier to see the selections.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = 1;
+
+ // Print the data to the screen.
+ System.out.println("Original Data:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset. We will use all default properties for this example.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Define and select the first part of the hyperslab selection.
+ long[] start = { 0, 0 };
+ long[] stride = { 3, 3 };
+ long[] count = { 2, 3 };
+ long[] block = { 2, 2 };
+ try {
+ if ((filespace_id >= 0))
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Define and select the second part of the hyperslab selection,
+ // which is subtracted from the first selection by the use of
+ // H5S_SELECT_NOTB
+ block[0] = 1;
+ block[1] = 1;
+ try {
+ if ((filespace_id >= 0)) {
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count, block);
+
+ // Write the data to the dataset.
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readHyperslab() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as written to disk by hyberslabs:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Initialize the read array.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = 0;
+
+ // Define and select the hyperslab to use for reading.
+ try {
+ if (dataset_id >= 0) {
+ filespace_id = H5.H5Dget_space(dataset_id);
+
+ long[] start = { 0, 1 };
+ long[] stride = { 4, 4 };
+ long[] count = { 2, 2 };
+ long[] block = { 2, 3 };
+
+ if (filespace_id >= 0) {
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+
+ // Read the data using the previously defined hyperslab.
+ if ((dataset_id >= 0) && (filespace_id >= 0))
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as read from disk by hyberslab:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_Hyperslab.writeHyperslab();
+ H5Ex_D_Hyperslab.readHyperslab();
+ }
+
+}
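
Along each dimension, a (start, stride, count, block) hyperslab selects the indices start + k*stride + b for k in [0, count) and b in [0, block), that is, count blocks of block elements spaced stride apart. The following stand-alone illustration of that arithmetic (plain Java, no HDF5 calls) uses the row parameters of the write selection above.

    public class HyperslabIndices {
        // Print the indices selected along one dimension by a
        // (start, stride, count, block) hyperslab.
        public static void print1D(long start, long stride, long count, long block) {
            for (long k = 0; k < count; k++)      // each block
                for (long b = 0; b < block; b++)  // each element within the block
                    System.out.print((start + k * stride + b) + " ");
            System.out.println();
        }

        public static void main(String[] args) {
            // Row parameters from writeHyperslab(): start 0, stride 3,
            // count 2, block 2 -> selects rows 0 1 3 4 of the 6-row dataset.
            print1D(0, 3, 2, 2);
        }
    }
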
diff --git a/java/examples/datasets/H5Ex_D_Nbit.java b/java/examples/datasets/H5Ex_D_Nbit.java
new file mode 100644
index 0000000..f74b675
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Nbit.java
@@ -0,0 +1,305 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the N-Bit filter. The program first checks if the
+ N-Bit filter is available, then, if it is, writes integers
+ to a dataset using N-Bit, then closes the file. Next, it
+ reopens the file, reads back the data, and outputs the type
+ of filter and the maximum value in the dataset to the screen.
+ ************************************************************/
+
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Nbit {
+ private static String FILENAME = "H5Ex_D_Nbit.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Filter type codes wrapped in an enum, so the identifier returned
+ // by H5Pget_filter can drive a readable switch statement.
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int filter_code) {
+ this.code = filter_code;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkNbitFilter() {
+ try {
+ // Check if N-Bit compression is available and can be used for both compression and decompression.
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_NBIT);
+ if (available == 0) {
+ System.out.println("N-Bit filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_NBIT);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("N-Bit filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeData() throws Exception {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dtype_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ try {
+ // Create a new file using the default properties.
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+
+ // Create the datatype to use with the N-Bit filter. It has an uncompressed size of 32 bits,
+ // but will have a size of 16 bits after being packed by the N-Bit filter.
+ dtype_id = H5.H5Tcopy(HDF5Constants.H5T_STD_I32LE);
+ H5.H5Tset_precision(dtype_id, 16);
+ H5.H5Tset_offset(dtype_id, 5);
+
+ // Create the dataset creation property list, add the N-Bit filter and set the chunk size.
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ H5.H5Pset_nbit(dcpl_id);
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+
+ // Create the dataset.
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, dtype_id, filespace_id, HDF5Constants.H5P_DEFAULT, dcpl_id,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Write the data to the dataset.
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ // Close and release resources.
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ if (dtype_id >= 0)
+ H5.H5Tclose(dtype_id);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ }
+
+ private static void readData() throws Exception {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ int status = H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ // Check if the read was successful.
+ if (status < 0)
+ System.out.print("Dataset read failed!");
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ /*
+ * Check if N-Bit compression is available and can be used for both compression and decompression. Normally we
+ * do not perform error checking in these examples for the sake of clarity, but in this case we will make an
+ * exception because this filter is an optional part of the HDF5 library.
+ */
+ try {
+ if (H5Ex_D_Nbit.checkNbitFilter()) {
+ H5Ex_D_Nbit.writeData();
+ H5Ex_D_Nbit.readData();
+ }
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ }
+}
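
The datatype in writeData() keeps 16 bits of precision at bit offset 5, so, per the comment in the example, the N-Bit filter packs each nominally 32-bit element into 16 bits of chunk payload, roughly halving storage before any further compression. A back-of-the-envelope size comparison (an illustration, not output of the example):

    public class NbitSize {
        public static void main(String[] args) {
            int elements = 32 * 64;              // DIM_X * DIM_Y from the example
            int rawBytes = elements * 32 / 8;    // H5T_STD_I32LE before filtering
            int packedBytes = elements * 16 / 8; // only the 16 precision bits are kept
            System.out.println("raw:    " + rawBytes + " bytes");
            System.out.println("packed: " + packedBytes + " bytes (plus filter header)");
        }
    }
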
diff --git a/java/examples/datasets/H5Ex_D_ReadWrite.java b/java/examples/datasets/H5Ex_D_ReadWrite.java
new file mode 100644
index 0000000..de94ccb
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_ReadWrite.java
@@ -0,0 +1,179 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+
+ This example shows how to read and write data to a
+ dataset. The program first writes integers to a dataset
+ with dataspace dimensions of DIM_XxDIM_Y, then closes the
+ file. Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_ReadWrite {
+ private static String FILENAME = "H5Ex_D_ReadWrite.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int RANK = 2;
+
+ private static void WriteDataset() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset. We will use all default properties for this example.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long dataset_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println(DATASETNAME + ":");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_ReadWrite.WriteDataset();
+ H5Ex_D_ReadWrite.ReadDataset();
+ }
+
+}
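
H5Ex_D_ReadWrite wraps each library call in its own try/catch so that a failure in one step never skips the cleanup of the other identifiers. Where brevity matters more, the same read-back can be written with a single try/finally, the style H5Ex_D_Nbit uses above; a minimal sketch against the file this example creates:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class ReadCompact {
        public static void main(String[] args) throws Exception {
            long file_id = -1;
            long dataset_id = -1;
            int[][] dset_data = new int[4][7]; // DIM_X x DIM_Y from the example
            try {
                file_id = H5.H5Fopen("H5Ex_D_ReadWrite.h5", HDF5Constants.H5F_ACC_RDONLY,
                        HDF5Constants.H5P_DEFAULT);
                dataset_id = H5.H5Dopen(file_id, "DS1", HDF5Constants.H5P_DEFAULT);
                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                        HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
            }
            finally {
                // Identifiers are released even if an earlier call threw.
                if (dataset_id >= 0)
                    H5.H5Dclose(dataset_id);
                if (file_id >= 0)
                    H5.H5Fclose(file_id);
            }
        }
    }
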
diff --git a/java/examples/datasets/H5Ex_D_Shuffle.java b/java/examples/datasets/H5Ex_D_Shuffle.java
new file mode 100644
index 0000000..ac3c1b4
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Shuffle.java
@@ -0,0 +1,373 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the shuffle filter with gzip compression. The
+ program first checks if the shuffle and gzip filters are
+ available, then, if they are, writes integers to a
+ dataset using shuffle+gzip, then closes the file. Next,
+ it reopens the file, reads back the data, and outputs the
+ types of filters and the maximum value in the dataset to
+ the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Shuffle {
+ private static String FILENAME = "H5Ex_D_Shuffle.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Filter type codes wrapped in an enum, so the identifier returned
+ // by H5Pget_filter can drive a readable switch statement.
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int filter_code) {
+ this.code = filter_code;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkGzipFilter() {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (available == 0) {
+ System.out.println("gzip filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("gzip filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static boolean checkShuffleFilter() {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SHUFFLE);
+ if (available == 0) {
+ System.out.println("Shuffle filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SHUFFLE);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("Shuffle filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeShuffle() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the shuffle
+ // filter and the gzip compression filter.
+ // The order in which the filters are added here is significant -
+ // compression is much more effective when the shuffle filter is
+ // applied first. The order in which the filters are added to the
+ // property list is the order in which they will be invoked when
+ // writing data.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_shuffle(dcpl_id);
+ H5.H5Pset_deflate(dcpl_id, 9);
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readShuffle() {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the number of filters, and retrieve and print the
+ // type of each.
+ try {
+ if (dcpl_id >= 0) {
+ int nfilters = H5.H5Pget_nfilters(dcpl_id);
+ for (int indx = 0; indx < nfilters; indx++) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
+ filter_type = H5.H5Pget_filter(dcpl_id, indx, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
+ System.out.print("Filter " + indx + ": Type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ // Check if gzip compression is available and can be used for both
+ // compression and decompression. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+ // optional part of the HDF5 library.
+ // Similarly, check for availability of the shuffle filter.
+ if (H5Ex_D_Shuffle.checkGzipFilter() && H5Ex_D_Shuffle.checkShuffleFilter()) {
+ H5Ex_D_Shuffle.writeShuffle();
+ H5Ex_D_Shuffle.readShuffle();
+ }
+ }
+
+}
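
The nfilters loop in readShuffle() doubles as a general way to inspect a pipeline: index 0 in the dataset creation property list is the first filter applied on write, so for this example it must report the shuffle filter ahead of deflate. The same loop, pulled out into a reusable helper (the class name is illustrative):

    import hdf.hdf5lib.H5;

    public class FilterPipeline {
        // Print a dataset creation property list's filters in pipeline
        // order; stage 0 is applied first when data is written.
        public static void print(long dcpl_id) throws Exception {
            int nfilters = H5.H5Pget_nfilters(dcpl_id);
            for (int indx = 0; indx < nfilters; indx++) {
                int[] flags = { 0 };
                long[] cd_nelmts = { 1 };
                int[] cd_values = { 0 };
                String[] filter_name = { "" };
                int[] filter_config = { 0 };
                int filter_type = H5.H5Pget_filter(dcpl_id, indx, flags, cd_nelmts,
                        cd_values, 120, filter_name, filter_config);
                System.out.println("stage " + indx + ": filter id " + filter_type
                        + " (" + filter_name[0] + ")");
            }
        }
    }
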
diff --git a/java/examples/datasets/H5Ex_D_Sofloat.java b/java/examples/datasets/H5Ex_D_Sofloat.java
new file mode 100644
index 0000000..26c8d49
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Sofloat.java
@@ -0,0 +1,356 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the Scale-Offset filter. The program first checks
+ if the Scale-Offset filter is available, then, if it is,
+ writes floating point numbers to a dataset using
+ Scale-Offset, and closes the file. Next, it reopens the
+ file, reads back the data, and outputs the type of filter
+ and the maximum value in the dataset to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Sofloat {
+
+ private static String FILENAME = "H5Ex_D_Sofloat.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Filter type codes wrapped in an enum, so the identifier returned
+ // by H5Pget_filter can drive a readable switch statement.
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int filter_code) {
+ this.code = filter_code;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkScaleoffsetFilter() {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+ if (available == 0) {
+ System.out.println("Scale-Offset filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("Scale-Offset filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeData() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ double[][] dset_data = new double[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ double x = indx;
+ double y = jndx;
+ dset_data[indx][jndx] = (x + 1) / (y + 0.3) + y;
+ }
+
+ // Find the maximum and minimum values in the write buffer, to compare against after reading.
+ double max = dset_data[0][0];
+ double min = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ if (min > dset_data[indx][jndx])
+ min = dset_data[indx][jndx];
+ }
+
+ // Print the maximum and minimum values.
+ System.out.println("Maximum value in write buffer is: " + max);
+ System.out.println("Minimum value in write buffer is: " + min);
+
+ // Create a new file using the default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the Scale-Offset
+ // filter and set the chunk size.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_FLOAT_DSCALE, 2);
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_IEEE_F64LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close and release resources.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readData() {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ double[][] dset_data = new double[DIM_X][DIM_Y];
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
+
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum and minimum values in the dataset, to verify that it was read correctly.
+ double max = dset_data[0][0];
+ double min = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ if (min > dset_data[indx][jndx])
+ min = dset_data[indx][jndx];
+ }
+
+ // Print the maximum and minimum values.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+ System.out.println("Minimum value in " + DATASETNAME + " is: " + min);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+
+ // Check if Scale-Offset compression is available and can be used
+ // for both compression and decompression. Normally we do not
+ // perform error checking in these examples for the sake of
+ // clarity, but in this case we will make an exception because this
+ // filter is an optional part of the HDF5 library.
+ if (H5Ex_D_Sofloat.checkScaleoffsetFilter()) {
+ H5Ex_D_Sofloat.writeData();
+ H5Ex_D_Sofloat.readData();
+ }
+ }
+}
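
With H5Z_SO_FLOAT_DSCALE and a scale factor of 2, the Scale-Offset filter preserves about two decimal digits: conceptually each value is multiplied by 10^2 and rounded to an integer, and the integers are then packed with the minimum number of bits. A rough illustration of that rounding on the first element of the example's write buffer (an approximation of the filter's effect, not a reimplementation):

    public class DscaleRounding {
        public static void main(String[] args) {
            // dset_data[0][0] from H5Ex_D_Sofloat: (x + 1) / (y + 0.3) + y
            // with x = 0, y = 0.
            double value = (0 + 1) / (0 + 0.3) + 0;
            double stored = Math.round(value * 100.0) / 100.0; // keep 2 decimal digits
            System.out.println("original value: " + value);  // 3.333...
            System.out.println("after D-scale:  " + stored); // 3.33
        }
    }
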
diff --git a/java/examples/datasets/H5Ex_D_Soint.java b/java/examples/datasets/H5Ex_D_Soint.java
new file mode 100644
index 0000000..7939883
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Soint.java
@@ -0,0 +1,335 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the Scale-Offset filter. The program first checks
+ if the Scale-Offset filter is available, then, if it is,
+ writes integers to a dataset using Scale-Offset, and
+ closes the file. Next, it reopens the file, reads back the
+ data, and outputs the type of filter and the maximum value
+ in the dataset to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Soint {
+
+ private static String FILENAME = "H5Ex_D_Soint.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Filter type codes wrapped in an enum, so the identifier returned
+ // by H5Pget_filter can drive a readable switch statement.
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
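+
+    // This enum mirrors the integer filter IDs in HDF5Constants and keeps a
+    // reverse-lookup map so the int code returned by H5Pget_filter can be
+    // switched on symbolically, e.g. (illustrative):
+    //   H5Z_filter f = H5Z_filter.get(HDF5Constants.H5Z_FILTER_DEFLATE);
+    //   // f == H5Z_filter.H5Z_FILTER_DEFLATE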
+
+ private static boolean checkScaleoffsetFilter() {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+ if (available == 0) {
+ System.out.println("Scale-Offset filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("Scale-Offset filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeData() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using the default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the Scale-Offset
+ // filter and set the chunk size.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_INT, HDF5Constants.H5Z_SO_INT_MINBITS_DEFAULT);
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
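+
+        // H5Z_SO_INT_MINBITS_DEFAULT lets the library compute the minimum
+        // number of bits needed for lossless packing; a fixed width could be
+        // requested instead (sketch, not used in this example):
+        //   H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_INT, 4);
+        // Like all filters, Scale-Offset requires chunked layout, hence the
+        // H5Pset_chunk call on the same property list.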
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close and release resources.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readData() {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+                // The Java library requires non-null filter_name and cd_values arrays.
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
+
+                filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120,
+                        filter_name, filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
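+
+        // In the Java binding, H5Pget_filter returns the filter ID directly;
+        // flags, cd_nelmts, cd_values, filter_name and filter_config are
+        // filled in through the supplied arrays, and 120 is the maximum
+        // number of characters to copy into filter_name[0].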
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
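+
+        // For the pattern indx * jndx - jndx = jndx * (indx - 1) on the
+        // 32 x 64 grid used here, the expected maximum is 63 * 30 = 1890.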
+
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+
+ // Check if Scale-Offset compression is available and can be used
+ // for both compression and decompression. Normally we do not
+ // perform error checking in these examples for the sake of
+ // clarity, but in this case we will make an exception because this
+        // filter is an optional part of the HDF5 library.
+ if (H5Ex_D_Soint.checkScaleoffsetFilter()) {
+ H5Ex_D_Soint.writeData();
+ H5Ex_D_Soint.readData();
+ }
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_Szip.java b/java/examples/datasets/H5Ex_D_Szip.java
new file mode 100644
index 0000000..5258234
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Szip.java
@@ -0,0 +1,337 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using szip compression. The program first checks if
+ szip compression is available, then if it is it writes
+ integers to a dataset using szip, then closes the file.
+ Next, it reopens the file, reads back the data, and
+ outputs the type of compression and the maximum value in
+ the dataset to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Szip {
+ private static String FILENAME = "H5Ex_D_Szip.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+    // Values for the filter type
+ enum H5Z_filter {
+        H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+        H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+        H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+        H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+        H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+        H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+        H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+        H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+        H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+        H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkSzipFilter() {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SZIP);
+ if (available == 0) {
+ System.out.println("szip filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SZIP);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("szip filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeSzip() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the szip compression
+ // filter.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_szip(dcpl_id, HDF5Constants.H5_SZIP_NN_OPTION_MASK, 8);
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
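+
+        // H5_SZIP_NN_OPTION_MASK selects szip's nearest-neighbor coding
+        // (H5_SZIP_EC_OPTION_MASK would select entropy coding instead), and
+        // 8 is the pixels-per-block parameter, which must be even and no
+        // greater than 32.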
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readSzip() {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+                // The Java library requires non-null filter_name and cd_values arrays.
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
+
+                filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120,
+                        filter_name, filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+        // Check if szip compression is available and can be used for both
+        // compression and decompression. Normally we do not perform error
+        // checking in these examples for the sake of clarity, but in this
+        // case we will make an exception because this filter is an
+        // optional part of the HDF5 library.
+ if (H5Ex_D_Szip.checkSzipFilter()) {
+ H5Ex_D_Szip.writeSzip();
+ H5Ex_D_Szip.readSzip();
+ }
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_Transform.java b/java/examples/datasets/H5Ex_D_Transform.java
new file mode 100644
index 0000000..1f289f3
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Transform.java
@@ -0,0 +1,250 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using a data transform expression. The program first
+ writes integers to a dataset using the transform
+ expression TRANSFORM, then closes the file. Next, it
+ reopens the file, reads back the data without a transform,
+ and outputs the data to the screen. Finally it reads the
+ data using the transform expression RTRANSFORM and outputs
+ the results to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Transform {
+
+ private static String FILE = "H5Ex_D_Transform.h5";
+ private static String DATASET = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static String TRANSFORM = "x+1";
+ private static String RTRANSFORM = "x-1";
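+
+    // A transform expression is applied element-by-element during I/O, with
+    // "x" standing for each data value; writing with "x+1" and reading with
+    // "x-1" therefore round-trips the original values. Expressions may
+    // combine +, -, *, / and parentheses, e.g. "(x+1)/2" (illustrative).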
+
+ private static void writeData() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dxpl_id = -1;
+
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int i = 0; i < DIM_X; i++)
+ for (int j = 0; j < DIM_Y; j++)
+ dset_data[i][j] = i * j - j;
+
+ // Output the data to the screen.
+ System.out.println("Original Data:");
+ for (int i = 0; i < DIM_X; i++) {
+ System.out.print(" [");
+ for (int j = 0; j < DIM_Y; j++)
+ System.out.print(" " + dset_data[i][j] + " ");
+ System.out.println("]");
+ }
+
+ // Create a new file using the default properties.
+ try {
+ file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(2, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset transfer property list and define the transform expression.
+ try {
+ dxpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ if (dxpl_id >= 0)
+ H5.H5Pset_data_transform(dxpl_id, TRANSFORM);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Create the dataset using the default properties. Note that the dataset
+        // must be created with a native datatype or the transform operation will fail.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASET, HDF5Constants.H5T_NATIVE_INT, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset using the dataset transfer property list.
+ try {
+ if ((dataset_id >= 0) && (dxpl_id >= 0))
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ dxpl_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dxpl_id >= 0)
+ H5.H5Pclose(dxpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readData() {
+
+ long file_id = -1;
+ long dataset_id = -1;
+ long dxpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASET, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as written with transform '" + TRANSFORM + "'");
+ for (int i = 0; i < DIM_X; i++) {
+ System.out.print(" [");
+ for (int j = 0; j < DIM_Y; j++)
+ System.out.print(" " + dset_data[i][j] + " ");
+ System.out.println("]");
+ }
+
+ // Create the dataset transfer property list and define the transform expression.
+ try {
+ dxpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ if (dxpl_id >= 0)
+ H5.H5Pset_data_transform(dxpl_id, RTRANSFORM);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the dataset transfer property list.
+ try {
+ if ((dataset_id >= 0) && (dxpl_id >= 0))
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ dxpl_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as written with transform '" + TRANSFORM + "' and read with transform '"
+ + RTRANSFORM + "'");
+ for (int i = 0; i < DIM_X; i++) {
+ System.out.print(" [");
+ for (int j = 0; j < DIM_Y; j++)
+ System.out.print(" " + dset_data[i][j] + " ");
+ System.out.println("]");
+ }
+
+ // Close and release resources.
+ try {
+ if (dxpl_id >= 0)
+ H5.H5Pclose(dxpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_Transform.writeData();
+ H5Ex_D_Transform.readData();
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_UnlimitedAdd.java b/java/examples/datasets/H5Ex_D_UnlimitedAdd.java
new file mode 100644
index 0000000..ada8df0
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_UnlimitedAdd.java
@@ -0,0 +1,393 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create and extend an unlimited
+ dataset. The program first writes integers to a dataset
+ with dataspace dimensions of DIM_XxDIM_Y, then closes the
+ file. Next, it reopens the file, reads back the data,
+ outputs it to the screen, extends the dataset, and writes
+ new data to the extended portions of the dataset. Finally
+ it reopens the file again, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_UnlimitedAdd {
+ private static String FILENAME = "H5Ex_D_UnlimitedAdd.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ private static void writeUnlimited() {
+ long file_id = -1;
+ long dcpl_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace with unlimited dimensions.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the chunk size.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
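+
+        // Chunked layout is what makes the unlimited maximum dimensions
+        // usable: H5Dcreate would fail if this dataspace were paired with
+        // the default contiguous layout.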
+
+ // Create the unlimited dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void extendUnlimited() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] extdims = { EDIM_X, EDIM_Y };
+ long[] start = { 0, 0 };
+ long[] count = new long[2];
+ int[][] dset_data;
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Get the dataspace and query its current dimensions so the read
+        // buffer can be allocated to match the dataset's actual size.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Allocate a 2D array sized to the current dataset dimensions.
+ dset_data = new int[(int) dims[0]][(int) dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset before extension:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Extend the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dset_extent(dataset_id, extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataspace for the newly extended dataset.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Initialize data for writing to the extended dataset.
+ for (int indx = 0; indx < EDIM_X; indx++)
+ for (int jndx = 0; jndx < EDIM_Y; jndx++)
+ extend_dset_data[indx][jndx] = jndx;
+
+ // Select the entire dataspace.
+ try {
+ if (dataspace_id >= 0) {
+ H5.H5Sselect_all(dataspace_id);
+
+ // Subtract a hyperslab reflecting the original dimensions from the
+ // selection. The selection now contains only the newly extended
+ // portions of the dataset.
+ count[0] = dims[0];
+ count[1] = dims[1];
+ H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_NOTB, start, null, count, null);
+
+ // Write the data to the selected portion of the dataset.
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
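+
+        // With the original 4 x 7 block subtracted via H5S_SELECT_NOTB, the
+        // selection is the L-shaped region of newly added elements: rows 0-3
+        // gain columns 7-9 and rows 4-5 are new in full, so only those
+        // positions receive values from extend_dset_data.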
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readUnlimited() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for the read buffer as before.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+        // Allocate a 2D array sized to the current dataset dimensions.
+ dset_data = new int[(int) dims[0]][(int) dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after extension:");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < dims[1]; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_UnlimitedAdd.writeUnlimited();
+ H5Ex_D_UnlimitedAdd.extendUnlimited();
+ H5Ex_D_UnlimitedAdd.readUnlimited();
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_UnlimitedGzip.java b/java/examples/datasets/H5Ex_D_UnlimitedGzip.java
new file mode 100644
index 0000000..c08ceef
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_UnlimitedGzip.java
@@ -0,0 +1,504 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create and extend an unlimited
+ dataset with gzip compression. The program first writes
+ integers to a gzip compressed dataset with dataspace
+ dimensions of DIM_XxDIM_Y, then closes the file. Next, it
+ reopens the file, reads back the data, outputs it to the
+ screen, extends the dataset, and writes new data to the
+ extended portions of the dataset. Finally it reopens the
+ file again, reads back the data, and outputs it to the
+ screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_UnlimitedGzip {
+ private static String FILENAME = "H5Ex_D_UnlimitedGzip.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+    // Values for the filter type
+ enum H5Z_filter {
+        H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+        H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+        H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+        H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+        H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+        H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+        H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+        H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+        H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+        H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkGzipFilter() {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (available == 0) {
+ System.out.println("gzip filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("gzip filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeUnlimited() {
+ long file_id = -1;
+ long dcpl_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace with unlimited dimensions.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the gzip compression
+ // filter.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_deflate(dcpl_id, 9);
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
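+
+        // Deflate level 9 requests maximum gzip compression (levels run from
+        // 0, no compression, to 9, best ratio but slowest); the filter is
+        // applied per chunk, hence the H5Pset_chunk call on the same list.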
+
+ // Create the unlimited dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void extendUnlimited() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] extdims = { EDIM_X, EDIM_Y };
+ long[] start = { 0, 0 };
+ long[] count = new long[2];
+ int[][] dset_data;
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Get the dataspace and query its current dimensions so the read
+        // buffer can be allocated to match the dataset's actual size.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Allocate a 2D array sized to the current dataset dimensions.
+ dset_data = new int[(int) dims[0]][(int) dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset before extension:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Extend the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dset_extent(dataset_id, extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataspace for the newly extended dataset.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Initialize data for writing to the extended dataset.
+ for (int indx = 0; indx < EDIM_X; indx++)
+ for (int jndx = 0; jndx < EDIM_Y; jndx++)
+ extend_dset_data[indx][jndx] = jndx;
+
+ // Select the entire dataspace.
+ try {
+ if (dataspace_id >= 0) {
+ H5.H5Sselect_all(dataspace_id);
+
+ // Subtract a hyperslab reflecting the original dimensions from the
+ // selection. The selection now contains only the newly extended
+ // portions of the dataset.
+ count[0] = dims[0];
+ count[1] = dims[1];
+ H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_NOTB, start, null, count, null);
+
+ // Write the data to the selected portion of the dataset.
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readUnlimited() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+                // The Java library requires non-null filter_name and cd_values arrays.
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
+                filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120,
+                        filter_name, filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for the read buffer as before.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+        // Allocate a 2D array sized to the current dataset dimensions.
+ dset_data = new int[(int) dims[0]][(int) dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after extension:");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < dims[1]; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ // Check if gzip compression is available and can be used for both
+ // compression and decompression. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+        // optional part of the HDF5 library.
+ if (H5Ex_D_UnlimitedGzip.checkGzipFilter()) {
+ H5Ex_D_UnlimitedGzip.writeUnlimited();
+ H5Ex_D_UnlimitedGzip.extendUnlimited();
+ H5Ex_D_UnlimitedGzip.readUnlimited();
+ }
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_UnlimitedMod.java b/java/examples/datasets/H5Ex_D_UnlimitedMod.java
new file mode 100644
index 0000000..884cad3
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_UnlimitedMod.java
@@ -0,0 +1,379 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create and extend an unlimited
+ dataset. The program first writes integers to a dataset
+ with dataspace dimensions of DIM_XxDIM_Y, then closes the
+ file. Next, it reopens the file, reads back the data,
+ outputs it to the screen, extends the dataset, and writes
+ new data to the entire extended dataset. Finally it
+ reopens the file again, reads back the data, and outputs it
+ to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_UnlimitedMod {
+ private static String FILENAME = "H5Ex_D_UnlimitedMod.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ private static void writeUnlimited() {
+ long file_id = -1;
+ long dcpl_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace with unlimited dimensions.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the chunk size.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the unlimited dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void extendUnlimited() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] extdims = { EDIM_X, EDIM_Y };
+ int[][] dset_data;
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Get the dataspace and query its current dimensions so the read
+        // buffer can be allocated to match the dataset's actual size.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Allocate a 2D array sized to the current dataset dimensions.
+ dset_data = new int[(int) dims[0]][(int) dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset before extension:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Extend the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dset_extent(dataset_id, extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataspace for the newly extended dataset.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Initialize data for writing to the extended dataset.
+ for (int indx = 0; indx < EDIM_X; indx++)
+ for (int jndx = 0; jndx < EDIM_Y; jndx++)
+ extend_dset_data[indx][jndx] = jndx;
+
+        // Write the data to the extended dataset.
+ try {
+ if ((dataspace_id >= 0) && (dataset_id >= 0))
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
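+
+        // Unlike the hyperslab approach in H5Ex_D_UnlimitedAdd, writing with
+        // H5S_ALL as the memory space and the full file dataspace rewrites
+        // every element of the extended dataset, including the original
+        // 4 x 7 block.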
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readUnlimited() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for the read buffer as before.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+        // Allocate a 2D array sized to the current dataset dimensions.
+ dset_data = new int[(int) dims[0]][(int) dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
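+ // (dims was refreshed by H5Sget_simple_extent_dims above, so these loop
+ // bounds reflect the extended extent stored in the file rather than the
+ // original DIM_X and DIM_Y constants.)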
+ System.out.println("Dataset after extension:");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < dims[1]; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_UnlimitedMod.writeUnlimited();
+ H5Ex_D_UnlimitedMod.extendUnlimited();
+ H5Ex_D_UnlimitedMod.readUnlimited();
+ }
+
+}
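For context, extendUnlimited and readUnlimited only work because writeUnlimited (earlier in this file) creates the dataset chunked with unlimited maximum dimensions. A minimal sketch of that creation step, with the try/catch error handling omitted and hypothetical CHUNK_X/CHUNK_Y constants (the actual code in writeUnlimited may differ in detail):

    long[] dims = { DIM_X, DIM_Y };
    long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
    long[] chunk = { CHUNK_X, CHUNK_Y };

    // Dataspace with a DIM_X x DIM_Y current extent and unlimited maximums.
    long dataspace_id = H5.H5Screate_simple(2, dims, maxdims);

    // Unlimited dimensions require a chunked layout.
    long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
    H5.H5Pset_chunk(dcpl_id, 2, chunk);

    long dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE,
            dataspace_id, HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);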
diff --git a/java/examples/datasets/Makefile.am b/java/examples/datasets/Makefile.am
new file mode 100644
index 0000000..49888a0
--- /dev/null
+++ b/java/examples/datasets/Makefile.am
@@ -0,0 +1,78 @@
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+# HDF5 Java Library Examples Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+JAVAROOT = .classes
+
+classes:
+ $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+pkgpath = examples/datasets
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
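+# Classpath used when compiling the example classes: the current directory,
+# the compiled classes, the main HDF5 jar, and the slf4j jars.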
+CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.5.jar:$$CLASSPATH
+
+jarfile = jar$(PACKAGE_TARNAME)datasets.jar
+
+AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
+
+TESTPACKAGE =
+
+noinst_JAVA = \
+ H5Ex_D_Alloc.java \
+ H5Ex_D_Checksum.java \
+ H5Ex_D_Chunk.java \
+ H5Ex_D_Compact.java \
+ H5Ex_D_External.java \
+ H5Ex_D_FillValue.java \
+ H5Ex_D_Gzip.java \
+ H5Ex_D_Hyperslab.java \
+ H5Ex_D_ReadWrite.java \
+ H5Ex_D_Shuffle.java \
+ H5Ex_D_Szip.java \
+ H5Ex_D_UnlimitedAdd.java \
+ H5Ex_D_UnlimitedGzip.java \
+ H5Ex_D_UnlimitedMod.java \
+ H5Ex_D_Nbit.java \
+ H5Ex_D_Transform.java \
+ H5Ex_D_Sofloat.java \
+ H5Ex_D_Soint.java
+
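+# Bundle the compiled example classes into a jar, rooted at the package path.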
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+noinst_DATA = $(jarfile)
+
+.PHONY: classes
+
+check_SCRIPTS = runExample.sh
+TEST_SCRIPT = $(check_SCRIPTS)
+
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class runExample.sh
+
+clean:
+ rm -rf $(JAVAROOT)
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
+
+include $(top_srcdir)/config/conclude.am
diff --git a/java/examples/datasets/runExample.sh.in b/java/examples/datasets/runExample.sh.in
new file mode 100644
index 0000000..8ac28a7
--- /dev/null
+++ b/java/examples/datasets/runExample.sh.in
@@ -0,0 +1,413 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+
+top_builddir=@top_builddir@
+top_srcdir=@top_srcdir@
+srcdir=@srcdir@
+
+USE_FILTER_SZIP="@USE_FILTER_SZIP@"
+USE_FILTER_DEFLATE="@USE_FILTER_DEFLATE@"
+
+TESTNAME=EX_Datasets
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+
+# Set up default variable values if not supplied by the user.
+RM='rm -rf'
+CMP='cmp'
+DIFF='diff -c'
+CP='cp'
+DIRNAME='dirname'
+LS='ls'
+AWK='awk'
+
+nerrors=0
+
+# where the libs exist
+HDFLIB_HOME="$top_srcdir/java/lib"
+BLDLIBDIR="./lib"
+BLDDIR="."
+HDFTEST_HOME="$top_srcdir/java/examples/datasets"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@datasets.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# All the library files are copied from the source directory to the test directory.
+# NOTE: Keep this framework when adding/removing test files.
+#       This list is also used to check that the files exist.
+#       A line beginning with '#' (no leading space) is skipped as a comment.
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$HDFLIB_HOME/slf4j-api-1.7.5.jar
+$HDFLIB_HOME/ext/slf4j-simple-1.7.5.jar
+$top_builddir/src/.libs/libhdf5.*
+$top_builddir/java/src/jni/.libs/libhdf5_java.*
+$top_builddir/java/src/$JARFILE
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Alloc.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Checksum.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Chunk.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Compact.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_External.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_FillValue.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Gzip.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Hyperslab.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_ReadWrite.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Shuffle.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Szip.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Nbit.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Transform.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Sofloat.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Soint.txt
+"
+
+#
+# copy files from source dirs to test dir
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+ # Copy the test files, using -f to make sure we get a fresh copy.
+ for tstfile in $COPY_LIBFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip cp if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source directory and would make
+ # cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out this to CREATE expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+ # Skip rm if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source directory, in which case
+ # nothing was copied.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDLIBDIR
+ fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+ # Copy the test files, using -f to make sure we get a fresh copy.
+ for tstfile in $COPY_DATAFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip cp if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source directory and would make
+ # cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out this to CREATE expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+ # Skip rm if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source directory, in which case
+ # nothing was copied.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDDIR/*.txt
+ $RM $BLDDIR/*.out
+ fi
+}
+
+# Print a one-line message left-justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+JAVAEXEFLAGS=@H5_JAVAFLAGS@
+
+###############################################################################
+# DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+
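+# The classpath needs the example jar plus the HDF5 jar and the slf4j jars;
+# the native JNI library is located separately via -Djava.library.path.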
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-1.7.5.jar:"$BLDLIBDIR"/slf4j-simple-1.7.5.jar:"$TESTJARFILE""
+
+TEST=/usr/bin/test
+if [ ! -x /usr/bin/test ]
+then
+TEST=`which test`
+fi
+
+if $TEST -z "$CLASSPATH"; then
+ CLASSPATH=""
+fi
+CLASSPATH=$CPATH":"$CLASSPATH
+export CLASSPATH
+
+if $TEST -n "$JAVAPATH" ; then
+ PATH=$JAVAPATH":"$PATH
+ export PATH
+fi
+
+if $TEST -e /bin/uname; then
+ os_name=`/bin/uname -s`
+elif $TEST -e /usr/bin/uname; then
+ os_name=`/usr/bin/uname -s`
+else
+ os_name=unknown
+fi
+
+if $TEST -z "$LD_LIBRARY_PATH" ; then
+ LD_LIBRARY_PATH=""
+fi
+
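+# On Darwin the JVM consults DYLD_LIBRARY_PATH for JNI libraries; everywhere
+# else LD_LIBRARY_PATH is used.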
+case $os_name in
+ Darwin)
+ DYLD_LIBRARY_PATH=$BLDLIBDIR:$DYLD_LIBRARY_PATH
+ export DYLD_LIBRARY_PATH
+ LD_LIBRARY_PATH=$DYLD_LIBRARY_PATH
+ ;;
+ *)
+ LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
+ ;;
+esac
+
+export LD_LIBRARY_PATH
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Alloc"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Alloc > H5Ex_D_Alloc.out)
+if diff H5Ex_D_Alloc.out examples.datasets.H5Ex_D_Alloc.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Alloc"
+else
+ echo "**FAILED** datasets.H5Ex_D_Alloc"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Checksum"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Checksum > H5Ex_D_Checksum.out)
+if diff H5Ex_D_Checksum.out examples.datasets.H5Ex_D_Checksum.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Checksum"
+else
+ echo "**FAILED** datasets.H5Ex_D_Checksum"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Chunk"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Chunk > H5Ex_D_Chunk.out)
+if diff H5Ex_D_Chunk.out examples.datasets.H5Ex_D_Chunk.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Chunk"
+else
+ echo "**FAILED** datasets.H5Ex_D_Chunk"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Compact"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Compact > H5Ex_D_Compact.out)
+if diff H5Ex_D_Compact.out examples.datasets.H5Ex_D_Compact.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Compact"
+else
+ echo "**FAILED** datasets.H5Ex_D_Compact"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_External"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_External > H5Ex_D_External.out)
+if diff H5Ex_D_External.out examples.datasets.H5Ex_D_External.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_External"
+else
+ echo "**FAILED** datasets.H5Ex_D_External"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_FillValue"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_FillValue > H5Ex_D_FillValue.out)
+if diff H5Ex_D_FillValue.out examples.datasets.H5Ex_D_FillValue.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_FillValue"
+else
+ echo "**FAILED** datasets.H5Ex_D_FillValue"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Gzip"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Gzip > H5Ex_D_Gzip.out)
+if diff H5Ex_D_Gzip.out examples.datasets.H5Ex_D_Gzip.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Gzip"
+else
+ echo "**FAILED** datasets.H5Ex_D_Gzip"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Hyperslab"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Hyperslab > H5Ex_D_Hyperslab.out)
+if diff H5Ex_D_Hyperslab.out examples.datasets.H5Ex_D_Hyperslab.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Hyperslab"
+else
+ echo "**FAILED** datasets.H5Ex_D_Hyperslab"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_ReadWrite"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_ReadWrite > H5Ex_D_ReadWrite.out)
+if diff H5Ex_D_ReadWrite.out examples.datasets.H5Ex_D_ReadWrite.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_ReadWrite"
+else
+ echo "**FAILED** datasets.H5Ex_D_ReadWrite"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Shuffle"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Shuffle > H5Ex_D_Shuffle.out)
+if diff H5Ex_D_Shuffle.out examples.datasets.H5Ex_D_Shuffle.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Shuffle"
+else
+ echo "**FAILED** datasets.H5Ex_D_Shuffle"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+if test $USE_FILTER_SZIP = "yes"; then
+ echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Szip"
+ ($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Szip > H5Ex_D_Szip.out)
+ if diff H5Ex_D_Szip.out examples.datasets.H5Ex_D_Szip.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Szip"
+ else
+ echo "**FAILED** datasets.H5Ex_D_Szip"
+ nerrors="`expr $nerrors + 1`"
+ fi
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedAdd"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedAdd > H5Ex_D_UnlimitedAdd.out)
+if diff H5Ex_D_UnlimitedAdd.out examples.datasets.H5Ex_D_UnlimitedAdd.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_UnlimitedAdd"
+else
+ echo "**FAILED** datasets.H5Ex_D_UnlimitedAdd"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedGzip"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedGzip > H5Ex_D_UnlimitedGzip.out)
+if diff H5Ex_D_External.out examples.datasets.H5Ex_D_External.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_UnlimitedGzip"
+else
+ echo "**FAILED** datasets.H5Ex_D_UnlimitedGzip"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedMod"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedMod > H5Ex_D_UnlimitedMod.out)
+if diff H5Ex_D_UnlimitedMod.out examples.datasets.H5Ex_D_UnlimitedMod.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_UnlimitedMod"
+else
+ echo "**FAILED** datasets.H5Ex_D_UnlimitedMod"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Nbit"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Nbit > H5Ex_D_Nbit.out)
+if diff H5Ex_D_Nbit.out examples.datasets.H5Ex_D_Nbit.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Nbit"
+else
+ echo "**FAILED** datasets.H5Ex_D_Nbit"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Transform"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Transform > H5Ex_D_Transform.out)
+if diff H5Ex_D_Transform.out examples.datasets.H5Ex_D_Transform.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Transform"
+else
+ echo "**FAILED** datasets.H5Ex_D_Transform"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Sofloat"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Sofloat > H5Ex_D_Sofloat.out)
+if diff H5Ex_D_Sofloat.out examples.datasets.H5Ex_D_Sofloat.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Sofloat"
+else
+ echo "**FAILED** datasets.H5Ex_D_Sofloat"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Soint"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Soint > H5Ex_D_Soint.out)
+if diff H5Ex_D_Soint.out examples.datasets.H5Ex_D_Soint.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Soint"
+else
+ echo "**FAILED** datasets.H5Ex_D_Soint"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+# Clean up temporary files/directories
+CLEAN_LIBFILES_AND_BLDLIBDIR
+CLEAN_DATAFILES_AND_BLDDIR
+
+# Report test results and exit
+if test $nerrors -eq 0 ; then
+ echo "All $TESTNAME tests passed."
+ exit $EXIT_SUCCESS
+else
+ echo "$TESTNAME tests failed with $nerrors errors."
+ exit $EXIT_FAILURE
+fi