author | Allen Byrne <byrn@hdfgroup.org> | 2016-02-29 02:43:00 (GMT)
---|---|---
committer | Allen Byrne <byrn@hdfgroup.org> | 2016-02-29 02:43:00 (GMT)
commit | 63249be0e10a8726acb5a7cf64491319eaa46227 (patch) |
tree | ebb2b75ece8852e8a58804631c0004f06a9bba9f | /java/examples
parent | a1617b7cdbe14173fcf690b4627059fa4528c19b (diff) |
download | hdf5-63249be0e10a8726acb5a7cf64491319eaa46227.zip hdf5-63249be0e10a8726acb5a7cf64491319eaa46227.tar.gz hdf5-63249be0e10a8726acb5a7cf64491319eaa46227.tar.bz2 |
[svn-r29226] HDFFV-9552: merge in java code.
Diffstat (limited to 'java/examples')
117 files changed, 15978 insertions, 0 deletions
diff --git a/java/examples/CMakeLists.txt b/java/examples/CMakeLists.txt
new file mode 100644
index 0000000..0430bdb
--- /dev/null
+++ b/java/examples/CMakeLists.txt
@@ -0,0 +1,7 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDFJAVA_EXAMPLES)
+
+add_subdirectory (${HDFJAVA_EXAMPLES_SOURCE_DIR}/datasets datasets)
+add_subdirectory (${HDFJAVA_EXAMPLES_SOURCE_DIR}/datatypes datatypes)
+add_subdirectory (${HDFJAVA_EXAMPLES_SOURCE_DIR}/groups groups)
+add_subdirectory (${HDFJAVA_EXAMPLES_SOURCE_DIR}/intro intro)
diff --git a/java/examples/Makefile.am b/java/examples/Makefile.am
new file mode 100644
index 0000000..e685e01
--- /dev/null
+++ b/java/examples/Makefile.am
@@ -0,0 +1,31 @@
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+#
+# This makefile mostly just reinvokes make in the various subdirectories
+# but does so in the correct order. You can alternatively invoke make from
+# each subdirectory manually.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+# HDF5 Java Library Examples Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+## Only recurse into subdirectories if the Java (JNI) interface is enabled.
+SUBDIRS=intro groups datasets datatypes
+
+include $(top_srcdir)/config/conclude.am
diff --git a/java/examples/datasets/CMakeLists.txt b/java/examples/datasets/CMakeLists.txt
new file mode 100644
index 0000000..2338325
--- /dev/null
+++ b/java/examples/datasets/CMakeLists.txt
@@ -0,0 +1,121 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDFJAVA_EXAMPLES_DATASETS Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+    ${HDF5_JAVA_JNI_BINARY_DIR}
+    ${HDF5_JAVA_HDF5_LIB_DIR}
+)
+
+set (HDF_JAVA_EXAMPLES
+    H5Ex_D_Alloc
+    H5Ex_D_Checksum
+    H5Ex_D_Chunk
+    H5Ex_D_Compact
+    H5Ex_D_External
+    H5Ex_D_FillValue
+    H5Ex_D_Gzip
+    H5Ex_D_Hyperslab
+    H5Ex_D_ReadWrite
+    H5Ex_D_Shuffle
+    H5Ex_D_Szip
+    H5Ex_D_UnlimitedAdd
+    H5Ex_D_UnlimitedGzip
+    H5Ex_D_UnlimitedMod
+    H5Ex_D_Nbit
+    H5Ex_D_Transform
+    H5Ex_D_Sofloat
+    H5Ex_D_Soint
+)
+
+if (WIN32)
+  set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+else (WIN32)
+  set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+endif (WIN32)
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS}")
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
+  set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
+endforeach (CMAKE_INCLUDE_PATH)
+
+foreach (example ${HDF_JAVA_EXAMPLES})
+  file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt
+  "Main-Class: examples.datasets.${example}
+"
+  )
+  add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java)
+  get_target_property (${example}_JAR_FILE ${example} JAR_FILE)
+#  install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
+  get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
+  add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
+endforeach (example ${HDF_JAVA_EXAMPLES})
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (HDFJAVA_JAR ${CMAKE_JAVA_INCLUDE_PATH})
+  set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${HDFJAVA_JAR}")
+endforeach (HDFJAVA_JAR)
+
+MACRO (ADD_H5_TEST resultfile resultcode)
+  add_test (
+      NAME JAVA_datasets-${resultfile}
+      COMMAND "${CMAKE_COMMAND}"
+          -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
+          -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${resultfile}_JAR_FILE}"
+          -D "TEST_ARGS:STRING=${ARGN}"
+          -D "TEST_PROGRAM=examples.datasets.${resultfile}"
+          -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}"
+          -D "TEST_FOLDER=${HDFJAVA_EXAMPLES_BINARY_DIR}"
+          -D "TEST_OUTPUT=datasets/${resultfile}.out"
+          -D "TEST_EXPECT=${resultcode}"
+          -D "TEST_REFERENCE=datasets/${resultfile}.txt"
+          -P "${HDF_RESOURCES_DIR}/jrunTest.cmake"
+  )
+  if (NOT "${last_test}" STREQUAL "")
+    set_tests_properties (JAVA_datasets-${resultfile} PROPERTIES DEPENDS ${last_test})
+  endif (NOT "${last_test}" STREQUAL "")
+  set (last_test "JAVA_datasets-${resultfile}")
+ENDMACRO (ADD_H5_TEST file)
+
+if (BUILD_TESTING)
+  foreach (example ${HDF_JAVA_EXAMPLES})
+    if (${example} STREQUAL "H5Ex_D_External")
+      add_test (
+          NAME JAVA_datasets-${example}-clearall-objects
+          COMMAND ${CMAKE_COMMAND}
+              -E remove
+              ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
+              ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.data
+              ${example}.out
+              ${example}.out.err
+      )
+    else (${example} STREQUAL "H5Ex_D_External")
+      add_test (
+          NAME JAVA_datasets-${example}-clearall-objects
+          COMMAND ${CMAKE_COMMAND}
+              -E remove
+              ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
+              ${example}.out
+              ${example}.out.err
+      )
+    endif (${example} STREQUAL "H5Ex_D_External")
+    if (NOT "${last_test}" STREQUAL "")
+      set_tests_properties (JAVA_datasets-${example}-clearall-objects PROPERTIES DEPENDS ${last_test})
+    endif (NOT "${last_test}" STREQUAL "")
+    add_test (
+        NAME JAVA_datasets-${example}-copy-objects
+        COMMAND ${CMAKE_COMMAND}
+            -E copy_if_different
+            ${HDFJAVA_EXAMPLES_SOURCE_DIR}/testfiles/examples.datasets.${example}.txt
+            ${HDFJAVA_EXAMPLES_DATASETS_BINARY_DIR}/${example}.txt
+    )
+    set_tests_properties (JAVA_datasets-${example}-copy-objects PROPERTIES DEPENDS JAVA_datasets-${example}-clearall-objects)
+    set (last_test "JAVA_datasets-${example}-copy-objects")
+    ADD_H5_TEST (${example} 0)
+  endforeach (example ${HDF_JAVA_EXAMPLES})
+endif (BUILD_TESTING)
diff --git a/java/examples/datasets/H5Ex_D_Alloc.java b/java/examples/datasets/H5Ex_D_Alloc.java
new file mode 100644
index 0000000..69fee38
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Alloc.java
@@ -0,0 +1,301 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to set the space allocation time
+ for a dataset. The program first creates two datasets,
+ one with the default allocation time (late) and one with
+ early allocation time, and displays whether each has been
+ allocated and their allocation size. Next, it writes data
+ to the datasets, and again displays whether each has been
+ allocated and their allocation size.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Alloc {
+    private static String FILENAME = "H5Ex_D_Alloc.h5";
+    private static String DATASETNAME1 = "DS1";
+    private static String DATASETNAME2 = "DS2";
+    private static final int DIM_X = 4;
+    private static final int DIM_Y = 7;
+    private static final int FILLVAL = 99;
+    private static final int RANK = 2;
+
+    // Values for the status of space allocation
+    enum H5D_space_status {
+        H5D_SPACE_STATUS_ERROR(-1), H5D_SPACE_STATUS_NOT_ALLOCATED(0),
+        H5D_SPACE_STATUS_PART_ALLOCATED(1), H5D_SPACE_STATUS_ALLOCATED(2);
+        private static final Map<Integer, H5D_space_status> lookup = new HashMap<Integer, H5D_space_status>();
+
+        static {
+            for (H5D_space_status s : EnumSet.allOf(H5D_space_status.class))
+                lookup.put(s.getCode(), s);
+        }
+
+        private int code;
+
+        H5D_space_status(int space_status) {
+            this.code = space_status;
+        }
+
+        public int getCode() {
+            return this.code;
+        }
+
+        public static H5D_space_status get(int code) {
+            return lookup.get(code);
+        }
+    }
+
+    private static void allocation() {
+        long file_id = -1;
+        long filespace_id = -1;
+        long dataset_id1 = -1;
+        long dataset_id2 = -1;
+        long dcpl_id = -1;
+        long[] dims = { DIM_X, DIM_Y };
+        int[][] dset_data = new int[DIM_X][DIM_Y];
+        int space_status = 0;
+        long storage_size = 0;
+
+        // Initialize the dataset.
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                dset_data[indx][jndx] = FILLVAL;
+
+        // Create a file using default properties.
+        try {
+            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create dataspace. Setting maximum size to NULL sets the maximum
+        // size to be the current size.
+        try {
+            filespace_id = H5.H5Screate_simple(RANK, dims, null);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the dataset creation property list.
+        try {
+            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Set the allocation time to "early". This way we can be sure
+        // that reading from the dataset immediately after creation will
+        // return the fill value.
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        System.out.println("Creating datasets...");
+        System.out.println(DATASETNAME1 + " has allocation time H5D_ALLOC_TIME_LATE");
+        System.out.println(DATASETNAME2 + " has allocation time H5D_ALLOC_TIME_EARLY");
+        System.out.println();
+
+        // Create the dataset using the default creation property list.
+        try {
+            if ((file_id >= 0) && (filespace_id >= 0))
+                dataset_id1 = H5.H5Dcreate(file_id, DATASETNAME1, HDF5Constants.H5T_NATIVE_INT, filespace_id,
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the dataset using the dataset creation property list.
+        try {
+            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+                dataset_id2 = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_NATIVE_INT, filespace_id,
+                        HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Retrieve and print space status and storage size for dset1.
+        try {
+            if (dataset_id1 >= 0)
+                space_status = H5.H5Dget_space_status(dataset_id1);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        try {
+            if (dataset_id1 >= 0)
+                storage_size = H5.H5Dget_storage_size(dataset_id1);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        String the_space = " ";
+        if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+            the_space += "not ";
+        System.out.println("Space for " + DATASETNAME1 + " has" + the_space + "been allocated.");
+        System.out.println("Storage size for " + DATASETNAME1 + " is: " + storage_size + " bytes.");
+
+        // Retrieve and print space status and storage size for dset2.
+        try {
+            if (dataset_id2 >= 0)
+                space_status = H5.H5Dget_space_status(dataset_id2);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        try {
+            if (dataset_id2 >= 0)
+                storage_size = H5.H5Dget_storage_size(dataset_id2);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        the_space = " ";
+        if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+            the_space += "not ";
+        System.out.println("Space for " + DATASETNAME2 + " has" + the_space + "been allocated.");
+        System.out.println("Storage size for " + DATASETNAME2 + " is: " + storage_size + " bytes.");
+        System.out.println();
+
+        System.out.println("Writing data...");
+        System.out.println();
+
+        // Write the data to the datasets.
+        try {
+            if (dataset_id1 >= 0)
+                H5.H5Dwrite(dataset_id1, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, dset_data[0]);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        try {
+            if (dataset_id2 >= 0)
+                H5.H5Dwrite(dataset_id2, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, dset_data[0]);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Retrieve and print space status and storage size for dset1.
+        try {
+            if (dataset_id1 >= 0)
+                space_status = H5.H5Dget_space_status(dataset_id1);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        try {
+            if (dataset_id1 >= 0)
+                storage_size = H5.H5Dget_storage_size(dataset_id1);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        the_space = " ";
+        if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+            the_space += "not ";
+        System.out.println("Space for " + DATASETNAME1 + " has" + the_space + "been allocated.");
+        System.out.println("Storage size for " + DATASETNAME1 + " is: " + storage_size + " bytes.");
+
+        // Retrieve and print space status and storage size for dset2.
+        try {
+            if (dataset_id2 >= 0)
+                space_status = H5.H5Dget_space_status(dataset_id2);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        try {
+            if (dataset_id2 >= 0)
+                storage_size = H5.H5Dget_storage_size(dataset_id2);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        the_space = " ";
+        if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+            the_space += "not ";
+        System.out.println("Space for " + DATASETNAME2 + " has" + the_space + "been allocated.");
+        System.out.println("Storage size for " + DATASETNAME2 + " is: " + storage_size + " bytes.");
+        System.out.println();
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pclose(dcpl_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dataset_id1 >= 0)
+                H5.H5Dclose(dataset_id1);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dataset_id2 >= 0)
+                H5.H5Dclose(dataset_id2);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (filespace_id >= 0)
+                H5.H5Sclose(filespace_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Close the file.
+        try {
+            if (file_id >= 0)
+                H5.H5Fclose(file_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public static void main(String[] args) {
+        H5Ex_D_Alloc.allocation();
+    }
+
+}
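The whole example turns on a single property-list call: H5Pset_alloc_time decides whether storage exists before the first write. A minimal sketch of just that step, using the same hdf.hdf5lib API as the example (the class name AllocSketch and file name "sketch.h5" are illustrative, and error handling is elided):

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class AllocSketch {
    public static void main(String[] args) throws Exception {
        long file_id = H5.H5Fcreate("sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        long space_id = H5.H5Screate_simple(2, new long[] { 4, 7 }, null);
        long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
        // Request early allocation; the default for contiguous datasets is late
        // (space is allocated on the first write).
        H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
        long dset_id = H5.H5Dcreate(file_id, "DS1", HDF5Constants.H5T_NATIVE_INT, space_id,
                HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
        // Storage is already allocated here, before any H5Dwrite call.
        System.out.println("Allocated " + H5.H5Dget_storage_size(dset_id) + " bytes");
        H5.H5Dclose(dset_id);
        H5.H5Pclose(dcpl_id);
        H5.H5Sclose(space_id);
        H5.H5Fclose(file_id);
    }
}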
diff --git a/java/examples/datasets/H5Ex_D_Checksum.java b/java/examples/datasets/H5Ex_D_Checksum.java
new file mode 100644
index 0000000..3a2f98f
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Checksum.java
@@ -0,0 +1,347 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the Fletcher32 checksum filter. The program first
+ checks if the Fletcher32 filter is available, then if it
+ is it writes integers to a dataset using Fletcher32, then
+ closes the file. Next, it reopens the file, reads back
+ the data, checks if the filter detected an error and
+ outputs the type of filter and the maximum value in the
+ dataset to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Checksum {
+    private static String FILENAME = "H5Ex_D_Checksum.h5";
+    private static String DATASETNAME = "DS1";
+    private static final int DIM_X = 32;
+    private static final int DIM_Y = 64;
+    private static final int CHUNK_X = 4;
+    private static final int CHUNK_Y = 8;
+    private static final int RANK = 2;
+    private static final int NDIMS = 2;
+
+    // Values for the filter identifiers
+    enum H5Z_filter {
+        H5Z_FILTER_ERROR(-1), H5Z_FILTER_NONE(0), H5Z_FILTER_DEFLATE(1), H5Z_FILTER_SHUFFLE(2),
+        H5Z_FILTER_FLETCHER32(3), H5Z_FILTER_SZIP(4), H5Z_FILTER_NBIT(5), H5Z_FILTER_SCALEOFFSET(6),
+        H5Z_FILTER_RESERVED(256), H5Z_FILTER_MAX(65535);
+        private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+        static {
+            for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+                lookup.put(s.getCode(), s);
+        }
+
+        private int code;
+
+        H5Z_filter(int layout_type) {
+            this.code = layout_type;
+        }
+
+        public int getCode() {
+            return this.code;
+        }
+
+        public static H5Z_filter get(int code) {
+            return lookup.get(code);
+        }
+    }
+
+    private static boolean checkFletcher32Filter() {
+        try {
+            int available = H5.H5Zfilter_avail(H5Z_filter.H5Z_FILTER_FLETCHER32.getCode());
+            if (available == 0) {
+                System.out.println("Fletcher32 filter not available.");
+                return false;
+            }
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_FLETCHER32);
+            if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+                    || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+                System.out.println("Fletcher32 filter not available for encoding and decoding.");
+                return false;
+            }
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        return true;
+    }
+
+    private static void writeChecksum() {
+        long file_id = -1;
+        long filespace_id = -1;
+        long dataset_id = -1;
+        long dcpl_id = -1;
+        long[] dims = { DIM_X, DIM_Y };
+        long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+        int[][] dset_data = new int[DIM_X][DIM_Y];
+
+        // Initialize data.
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                dset_data[indx][jndx] = indx * jndx - jndx;
+
+        // Create a new file using default properties.
+        try {
+            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create dataspace. Setting maximum size to NULL sets the maximum
+        // size to be the current size.
+        try {
+            filespace_id = H5.H5Screate_simple(RANK, dims, null);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the dataset creation property list, add the Fletcher32 filter.
+        try {
+            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+            if (dcpl_id >= 0) {
+                H5.H5Pset_fletcher32(dcpl_id);
+                // Set the chunk size.
+                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+            }
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the dataset.
+        try {
+            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+                        HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Write the data to the dataset.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pclose(dcpl_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (filespace_id >= 0)
+                H5.H5Sclose(filespace_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Close the file.
+        try {
+            if (file_id >= 0)
+                H5.H5Fclose(file_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    private static void readChecksum() {
+        long file_id = -1;
+        long dataset_id = -1;
+        long dcpl_id = -1;
+        int[][] dset_data = new int[DIM_X][DIM_Y];
+
+        // Open an existing file.
+        try {
+            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Open an existing dataset.
+        try {
+            if (file_id >= 0)
+                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Retrieve the dataset creation property list.
+        try {
+            if (dataset_id >= 0)
+                dcpl_id = H5.H5Dget_create_plist(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Retrieve and print the filter type. Here we only retrieve the
+        // first filter because we know that we only added one filter.
+        try {
+            if (dcpl_id >= 0) {
+                // Java lib requires a valid filter_name object and cd_values
+                int[] flags = { 0 };
+                long[] cd_nelmts = { 1 };
+                int[] cd_values = { 0 };
+                String[] filter_name = { "" };
+                int[] filter_config = { 0 };
+                int filter_type = -1;
+                filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+                        filter_config);
+                System.out.print("Filter type is: ");
+                switch (H5Z_filter.get(filter_type)) {
+                case H5Z_FILTER_DEFLATE:
+                    System.out.println("H5Z_FILTER_DEFLATE");
+                    break;
+                case H5Z_FILTER_SHUFFLE:
+                    System.out.println("H5Z_FILTER_SHUFFLE");
+                    break;
+                case H5Z_FILTER_FLETCHER32:
+                    System.out.println("H5Z_FILTER_FLETCHER32");
+                    break;
+                case H5Z_FILTER_SZIP:
+                    System.out.println("H5Z_FILTER_SZIP");
+                    break;
+                default:
+                    System.out.println("H5Z_FILTER_ERROR");
+                }
+                System.out.println();
+            }
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Read the data using the default properties.
+        try {
+            if (dataset_id >= 0) {
+                int status = H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+                // Check if the read was successful. Normally we do not perform
+                // error checking in these examples for the sake of clarity, but in
+                // this case we will make an exception because this is how the
+                // fletcher32 checksum filter reports data errors.
+                if (status < 0) {
+                    System.out.print("Dataset read failed!");
+                    try {
+                        if (dcpl_id >= 0)
+                            H5.H5Pclose(dcpl_id);
+                        if (dataset_id >= 0)
+                            H5.H5Dclose(dataset_id);
+                        if (file_id >= 0)
+                            H5.H5Fclose(file_id);
+                    }
+                    catch (Exception e) {
+                        e.printStackTrace();
+                    }
+                    return;
+                }
+            }
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Find the maximum value in the dataset, to verify that it was read
+        // correctly.
+        int max = dset_data[0][0];
+        for (int indx = 0; indx < DIM_X; indx++) {
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                if (max < dset_data[indx][jndx])
+                    max = dset_data[indx][jndx];
+        }
+        // Print the maximum value.
+        System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pclose(dcpl_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Close the file.
+        try {
+            if (file_id >= 0)
+                H5.H5Fclose(file_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public static void main(String[] args) {
+        // Check if the Fletcher32 filter is available and can be used for
+        // both encoding and decoding. Normally we do not perform error
+        // checking in these examples for the sake of clarity, but in this
+        // case we will make an exception because this filter is an
+        // optional part of the hdf5 library.
+        if (H5Ex_D_Checksum.checkFletcher32Filter()) {
+            H5Ex_D_Checksum.writeChecksum();
+            H5Ex_D_Checksum.readChecksum();
+        }
+    }
+
+}
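Stripped of the availability checks and error handling, the Fletcher32 setup above reduces to two property-list calls; a minimal sketch (identifiers hypothetical), keeping in mind that the checksum is verified automatically whenever a chunk is read back:

long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
H5.H5Pset_fletcher32(dcpl_id);                     // attach the checksum filter
H5.H5Pset_chunk(dcpl_id, 2, new long[] { 4, 8 });  // filters require a chunked layout
// Pass dcpl_id to H5.H5Dcreate; a later H5.H5Dread reports failure if a
// stored checksum does not match the data that was read.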
diff --git a/java/examples/datasets/H5Ex_D_Chunk.java b/java/examples/datasets/H5Ex_D_Chunk.java
new file mode 100644
index 0000000..7f02e5a
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Chunk.java
@@ -0,0 +1,366 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create a chunked dataset. The
+ program first writes integers in a hyperslab selection to
+ a chunked dataset with dataspace dimensions of DIM_XxDIM_Y
+ and chunk size of CHUNK_XxCHUNK_Y, then closes the file.
+ Next, it reopens the file, reads back the data, and
+ outputs it to the screen. Finally it reads the data again
+ using a different hyperslab selection, and outputs
+ the result to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Chunk {
+    private static String FILENAME = "H5Ex_D_Chunk.h5";
+    private static String DATASETNAME = "DS1";
+    private static final int DIM_X = 6;
+    private static final int DIM_Y = 8;
+    private static final int CHUNK_X = 4;
+    private static final int CHUNK_Y = 4;
+    private static final int RANK = 2;
+    private static final int NDIMS = 2;
+
+    // Values for the storage layout
+    enum H5D_layout {
+        H5D_LAYOUT_ERROR(-1), H5D_COMPACT(0), H5D_CONTIGUOUS(1), H5D_CHUNKED(2), H5D_NLAYOUTS(3);
+        private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();
+
+        static {
+            for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
+                lookup.put(s.getCode(), s);
+        }
+
+        private int code;
+
+        H5D_layout(int layout_type) {
+            this.code = layout_type;
+        }
+
+        public int getCode() {
+            return this.code;
+        }
+
+        public static H5D_layout get(int code) {
+            return lookup.get(code);
+        }
+    }
+
+    private static void writeChunk() {
+        long file_id = -1;
+        long filespace_id = -1;
+        long dataset_id = -1;
+        long dcpl_id = -1;
+        long[] dims = { DIM_X, DIM_Y };
+        long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+        int[][] dset_data = new int[DIM_X][DIM_Y];
+
+        // Initialize data to "1", to make it easier to see the selections.
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                dset_data[indx][jndx] = 1;
+
+        // Print the data to the screen.
+        System.out.println("Original Data:");
+        for (int indx = 0; indx < DIM_X; indx++) {
+            System.out.print(" [ ");
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                System.out.print(dset_data[indx][jndx] + " ");
+            System.out.println("]");
+        }
+        System.out.println();
+
+        // Create a new file using default properties.
+        try {
+            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create dataspace. Setting maximum size to NULL sets the maximum
+        // size to be the current size.
+        try {
+            filespace_id = H5.H5Screate_simple(RANK, dims, null);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the dataset creation property list.
+        try {
+            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Set the chunk size.
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the chunked dataset.
+        try {
+            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+                        HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Define and select the first part of the hyperslab selection.
+        long[] start = { 0, 0 };
+        long[] stride = { 3, 3 };
+        long[] count = { 2, 3 };
+        long[] block = { 2, 2 };
+        try {
+            if ((filespace_id >= 0))
+                H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        // Define and select the second part of the hyperslab selection,
+        // which is subtracted from the first selection by the use of
+        // H5S_SELECT_NOTB
+        block[0] = 1;
+        block[1] = 1;
+        try {
+            if ((filespace_id >= 0)) {
+                H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count, block);
+
+                // Write the data to the dataset.
+                if (dataset_id >= 0)
+                    H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
+                            HDF5Constants.H5P_DEFAULT, dset_data);
+            }
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pclose(dcpl_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (filespace_id >= 0)
+                H5.H5Sclose(filespace_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Close the file.
+        try {
+            if (file_id >= 0)
+                H5.H5Fclose(file_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    private static void readChunk() {
+        long file_id = -1;
+        long filespace_id = -1;
+        long dataset_id = -1;
+        long dcpl_id = -1;
+        int[][] dset_data = new int[DIM_X][DIM_Y];
+
+        // Open an existing file.
+        try {
+            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Open an existing dataset.
+        try {
+            if (file_id >= 0)
+                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Retrieve the dataset creation property list.
+        try {
+            if (dataset_id >= 0)
+                dcpl_id = H5.H5Dget_create_plist(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Print the storage layout.
+        try {
+            if (dcpl_id >= 0) {
+                int layout_type = H5.H5Pget_layout(dcpl_id);
+                System.out.print("Storage layout for " + DATASETNAME + " is: ");
+                switch (H5D_layout.get(layout_type)) {
+                case H5D_COMPACT:
+                    System.out.println("H5D_COMPACT");
+                    break;
+                case H5D_CONTIGUOUS:
+                    System.out.println("H5D_CONTIGUOUS");
+                    break;
+                case H5D_CHUNKED:
+                    System.out.println("H5D_CHUNKED");
+                    break;
+                case H5D_LAYOUT_ERROR:
+                    break;
+                case H5D_NLAYOUTS:
+                    break;
+                default:
+                    break;
+                }
+                System.out.println();
+            }
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Read the data using the default properties.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Output the data to the screen.
+        System.out.println("Data as written to disk by hyberslabs:");
+        for (int indx = 0; indx < DIM_X; indx++) {
+            System.out.print(" [ ");
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                System.out.print(dset_data[indx][jndx] + " ");
+            System.out.println("]");
+        }
+        System.out.println();
+
+        // Initialize the read array.
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                dset_data[indx][jndx] = 0;
+
+        // Define and select the hyperslab to use for reading.
+        try {
+            if (dataset_id >= 0) {
+                filespace_id = H5.H5Dget_space(dataset_id);
+
+                long[] start = { 0, 1 };
+                long[] stride = { 4, 4 };
+                long[] count = { 2, 2 };
+                long[] block = { 2, 3 };
+
+                if (filespace_id >= 0) {
+                    H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+
+                    // Read the data using the previously defined hyperslab.
+                    if ((dataset_id >= 0) && (filespace_id >= 0))
+                        H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
+                                HDF5Constants.H5P_DEFAULT, dset_data);
+                }
+            }
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Output the data to the screen.
+        System.out.println("Data as read from disk by hyberslab:");
+        for (int indx = 0; indx < DIM_X; indx++) {
+            System.out.print(" [ ");
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                System.out.print(dset_data[indx][jndx] + " ");
+            System.out.println("]");
+        }
+        System.out.println();
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pclose(dcpl_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (filespace_id >= 0)
+                H5.H5Sclose(filespace_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Close the file.
+        try {
+            if (file_id >= 0)
+                H5.H5Fclose(file_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public static void main(String[] args) {
+        H5Ex_D_Chunk.writeChunk();
+        H5Ex_D_Chunk.readChunk();
+    }
+
+}
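The two-step selection in writeChunk is the subtle part: the first H5Sselect_hyperslab call selects a grid of 2x2 blocks, and the second, with H5S_SELECT_NOTB, subtracts a grid of 1x1 blocks at the same start and stride, so only the difference of the two selections receives data. A condensed sketch of just those calls (assuming a filespace_id obtained as in the example):

// Select a pattern of 2x2 blocks spaced 3 apart, then punch out one corner element of each.
long[] start = { 0, 0 }, stride = { 3, 3 }, count = { 2, 3 }, block = { 2, 2 };
H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
block[0] = block[1] = 1; // shrink each block to its upper-left element
H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count, block);
// A following H5.H5Dwrite with filespace_id as the file space writes only
// to the elements remaining in the (first minus second) selection.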
diff --git a/java/examples/datasets/H5Ex_D_Compact.java b/java/examples/datasets/H5Ex_D_Compact.java
new file mode 100644
index 0000000..4f1e2f0
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Compact.java
@@ -0,0 +1,289 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a compact
+ dataset. The program first writes integers to a compact
+ dataset with dataspace dimensions of DIM_XxDIM_Y, then
+ closes the file. Next, it reopens the file, reads back
+ the data, and outputs it to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Compact {
+    private static String FILENAME = "H5Ex_D_Compact.h5";
+    private static String DATASETNAME = "DS1";
+    private static final int DIM_X = 4;
+    private static final int DIM_Y = 7;
+    private static final int RANK = 2;
+
+    // Values for the storage layout
+    enum H5D_layout {
+        H5D_LAYOUT_ERROR(-1), H5D_COMPACT(0), H5D_CONTIGUOUS(1), H5D_CHUNKED(2), H5D_NLAYOUTS(3);
+        private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();
+
+        static {
+            for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
+                lookup.put(s.getCode(), s);
+        }
+
+        private int code;
+
+        H5D_layout(int layout_type) {
+            this.code = layout_type;
+        }
+
+        public int getCode() {
+            return this.code;
+        }
+
+        public static H5D_layout get(int code) {
+            return lookup.get(code);
+        }
+    }
+
+    private static void writeCompact() {
+        long file_id = -1;
+        long filespace_id = -1;
+        long dataset_id = -1;
+        long dcpl_id = -1;
+        long[] dims = { DIM_X, DIM_Y };
+        int[][] dset_data = new int[DIM_X][DIM_Y];
+
+        // Initialize data.
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                dset_data[indx][jndx] = indx * jndx - jndx;
+
+        // Create a new file using default properties.
+        try {
+            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create dataspace. Setting maximum size to NULL sets the maximum
+        // size to be the current size.
+        try {
+            filespace_id = H5.H5Screate_simple(RANK, dims, null);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the dataset creation property list.
+        try {
+            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Set the layout to compact.
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pset_layout(dcpl_id, H5D_layout.H5D_COMPACT.getCode());
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the dataset.
+        try {
+            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+                        HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Write the data to the dataset.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pclose(dcpl_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (filespace_id >= 0)
+                H5.H5Sclose(filespace_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Close the file.
+        try {
+            if (file_id >= 0)
+                H5.H5Fclose(file_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    private static void readCompact() {
+        long file_id = -1;
+        long filespace_id = -1;
+        long dataset_id = -1;
+        long dcpl_id = -1;
+        int[][] dset_data = new int[DIM_X][DIM_Y];
+
+        // Open file and dataset using the default properties.
+        try {
+            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Open an existing dataset.
+        try {
+            if (file_id >= 0)
+                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Retrieve the dataset creation property list.
+        try {
+            if (dataset_id >= 0)
+                dcpl_id = H5.H5Dget_create_plist(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Print the storage layout.
+        try {
+            if (dcpl_id >= 0) {
+                int layout_type = H5.H5Pget_layout(dcpl_id);
+                System.out.print("Storage layout for " + DATASETNAME + " is: ");
+                switch (H5D_layout.get(layout_type)) {
+                case H5D_COMPACT:
+                    System.out.println("H5D_COMPACT");
+                    break;
+                case H5D_CONTIGUOUS:
+                    System.out.println("H5D_CONTIGUOUS");
+                    break;
+                case H5D_CHUNKED:
+                    System.out.println("H5D_CHUNKED");
+                    break;
+                case H5D_LAYOUT_ERROR:
+                    break;
+                case H5D_NLAYOUTS:
+                    break;
+                default:
+                    break;
+                }
+                System.out.println();
+            }
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Read the data using the default properties.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Output the data to the screen.
+        System.out.println("Data for " + DATASETNAME + " is: ");
+        for (int indx = 0; indx < DIM_X; indx++) {
+            System.out.print(" [ ");
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                System.out.print(dset_data[indx][jndx] + " ");
+            System.out.println("]");
+        }
+        System.out.println();
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pclose(dcpl_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (filespace_id >= 0)
+                H5.H5Sclose(filespace_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Close the file.
+        try {
+            if (file_id >= 0)
+                H5.H5Fclose(file_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public static void main(String[] args) {
+        H5Ex_D_Compact.writeCompact();
+        H5Ex_D_Compact.readCompact();
+    }
+
+}
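The compact layout stores the raw data inside the dataset's object header rather than in separate data blocks, which is why it only suits very small datasets (the header-message limit is on the order of 64 KiB). The one call that changes versus an ordinary dataset, shown here as a sketch reusing the example's own H5D_layout enum:

long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
// Keep the raw data in the object header; everything else proceeds as usual.
H5.H5Pset_layout(dcpl_id, H5D_layout.H5D_COMPACT.getCode());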
diff --git a/java/examples/datasets/H5Ex_D_External.java b/java/examples/datasets/H5Ex_D_External.java
new file mode 100644
index 0000000..5fdc696
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_External.java
@@ -0,0 +1,238 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to an
+ external dataset. The program first writes integers to an
+ external dataset with dataspace dimensions of DIM_XxDIM_Y,
+ then closes the file. Next, it reopens the file, reads
+ back the data, and outputs the name of the external data
+ file and the data to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_External {
+    private static String FILENAME = "H5Ex_D_External.h5";
+    private static String EXTERNALNAME = "H5Ex_D_External.data";
+    private static String DATASETNAME = "DS1";
+    private static final int DIM_X = 4;
+    private static final int DIM_Y = 7;
+    private static final int RANK = 2;
+    private static final int NAME_BUF_SIZE = 32;
+
+    private static void writeExternal() {
+        long file_id = -1;
+        long dcpl_id = -1;
+        long filespace_id = -1;
+        long dataset_id = -1;
+        long[] dims = { DIM_X, DIM_Y };
+        int[][] dset_data = new int[DIM_X][DIM_Y];
+
+        // Initialize the dataset.
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                dset_data[indx][jndx] = indx * jndx - jndx;
+
+        // Create a new file using default properties.
+        try {
+            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create dataspace. Setting maximum size to NULL sets the maximum
+        // size to be the current size.
+        try {
+            filespace_id = H5.H5Screate_simple(RANK, dims, null);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the dataset creation property list.
+        try {
+            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Set the external file.
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pset_external(dcpl_id, EXTERNALNAME, 0, HDF5Constants.H5F_UNLIMITED);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the dataset.
+        try {
+            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+                        HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Write the dataset.
+        try {
+            H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                    HDF5Constants.H5P_DEFAULT, dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Terminate access to the data space.
+        try {
+            if (filespace_id >= 0)
+                H5.H5Sclose(filespace_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pclose(dcpl_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Close the file.
+        try {
+            if (file_id >= 0)
+                H5.H5Fclose(file_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+    }
+
+    private static void readExternal() {
+        long file_id = -1;
+        long dcpl_id = -1;
+        long dataset_id = -1;
+        int[][] dset_data = new int[DIM_X][DIM_Y];
+        String[] Xname = new String[1];
+
+        // Open file using the default properties.
+        try {
+            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Open dataset using the default properties.
+        try {
+            if (file_id >= 0)
+                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Retrieve the dataset creation property list.
+        try {
+            if (dataset_id >= 0)
+                dcpl_id = H5.H5Dget_create_plist(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Retrieve and print the name of the external file.
+        long[] Xsize = new long[NAME_BUF_SIZE];
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pget_external(dcpl_id, 0, Xsize.length, Xname, Xsize);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        System.out.println(DATASETNAME + " is stored in file: " + Xname[0]);
+
+        // Read the data using the default properties.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Output the data to the screen.
+        System.out.println(DATASETNAME + ":");
+        for (int indx = 0; indx < DIM_X; indx++) {
+            System.out.print(" [ ");
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                System.out.print(dset_data[indx][jndx] + " ");
+            System.out.println("]");
+        }
+        System.out.println();
+
+        // Close the dataset.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pclose(dcpl_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Close the file.
+        try {
+            if (file_id >= 0)
+                H5.H5Fclose(file_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public static void main(String[] args) {
+        H5Ex_D_External.writeExternal();
+        H5Ex_D_External.readExternal();
+    }
+
+}
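With external storage, the .h5 file holds only metadata while the raw data lives in the named flat binary file; the mapping is declared once on the creation property list. A sketch of the declaration and its later inspection, using only the calls that appear in the example:

long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
// Raw data goes to H5Ex_D_External.data starting at byte offset 0,
// with no size limit on the segment.
H5.H5Pset_external(dcpl_id, "H5Ex_D_External.data", 0, HDF5Constants.H5F_UNLIMITED);

// Later, the mapping can be read back from the dataset's creation plist:
String[] name = new String[1];
long[] size = new long[32];
H5.H5Pget_external(dcpl_id, 0, size.length, name, size);
System.out.println("external segment: " + name[0]);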
diff --git a/java/examples/datasets/H5Ex_D_FillValue.java b/java/examples/datasets/H5Ex_D_FillValue.java
new file mode 100644
index 0000000..982d2cb
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_FillValue.java
@@ -0,0 +1,246 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to set the fill value for a
+ dataset. The program first sets the fill value to
+ FILLVAL, creates a dataset with dimensions of DIM_XxDIM_Y,
+ reads from the uninitialized dataset, and outputs the
+ contents to the screen. Next, it writes integers to the
+ dataset, reads the data back, and outputs it to the
+ screen. Finally it extends the dataset, reads from it,
+ and outputs the result to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_FillValue {
+    private static String FILENAME = "H5Ex_D_FillValue.h5";
+    private static String DATASETNAME = "ExtendibleArray";
+    private static final int DIM_X = 4;
+    private static final int DIM_Y = 7;
+    private static final int EDIM_X = 6;
+    private static final int EDIM_Y = 10;
+    private static final int CHUNK_X = 4;
+    private static final int CHUNK_Y = 4;
+    private static final int RANK = 2;
+    private static final int NDIMS = 2;
+    private static final int FILLVAL = 99;
+
+    private static void fillValue() {
+        long file_id = -1;
+        long dcpl_id = -1;
+        long dataspace_id = -1;
+        long dataset_id = -1;
+        long[] dims = { DIM_X, DIM_Y };
+        long[] extdims = { EDIM_X, EDIM_Y };
+        long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+        long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+        int[][] write_dset_data = new int[DIM_X][DIM_Y];
+        int[][] read_dset_data = new int[DIM_X][DIM_Y];
+        int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+        // Initialize the dataset.
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                write_dset_data[indx][jndx] = indx * jndx - jndx;
+
+        // Create a new file using default properties.
+        try {
+            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create dataspace with unlimited dimensions.
+        try {
+            dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the dataset creation property list.
+        try {
+            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Set the chunk size.
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Set the fill value for the dataset
+        try {
+            int[] fill_value = { FILLVAL };
+            if (dcpl_id >= 0)
+                H5.H5Pset_fill_value(dcpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Set the allocation time to "early". This way we can be sure
+        // that reading from the dataset immediately after creation will
+        // return the fill value.
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the dataset using the dataset creation property list.
+        try {
+            if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+                        HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Read values from the dataset, which has not been written to yet.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, read_dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Output the data to the screen.
+        System.out.println("Dataset before being written to:");
+        for (int indx = 0; indx < DIM_X; indx++) {
+            System.out.print(" [ ");
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                System.out.print(read_dset_data[indx][jndx] + " ");
+            System.out.println("]");
+        }
+        System.out.println();
+
+        // Write the data to the dataset.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, write_dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Read the data back.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, read_dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Output the data to the screen.
+        System.out.println("Dataset after being written to:");
+        for (int indx = 0; indx < DIM_X; indx++) {
+            System.out.print(" [ ");
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                System.out.print(read_dset_data[indx][jndx] + " ");
+            System.out.println("]");
+        }
+        System.out.println();
+
+        // Extend the dataset.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dset_extent(dataset_id, extdims);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Read from the extended dataset.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, extend_dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Output the data to the screen.
+        System.out.println("Dataset after extension:");
+        for (int indx = 0; indx < EDIM_X; indx++) {
+            System.out.print(" [ ");
+            for (int jndx = 0; jndx < EDIM_Y; jndx++)
+                System.out.print(extend_dset_data[indx][jndx] + " ");
+            System.out.println("]");
+        }
+        System.out.println();
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dataspace_id >= 0)
+                H5.H5Sclose(dataspace_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pclose(dcpl_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Close the file.
+        try {
+            if (file_id >= 0)
+                H5.H5Fclose(file_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+    }
+
+    public static void main(String[] args) {
+        H5Ex_D_FillValue.fillValue();
+    }
+
+}
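The fill value is what makes the "read before write" and "read after extend" steps above meaningful: every element that has never been written returns FILLVAL rather than garbage. The property-list fragment at the heart of the example, as a sketch (the combination with early allocation mirrors the example; a chunked layout is what allows the later H5Dset_extent):

long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
int[] fill_value = { 99 };
// Unwritten elements, including those exposed later by H5.H5Dset_extent,
// read back as 99.
H5.H5Pset_fill_value(dcpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);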
* + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write data to a dataset + using gzip compression (also called zlib or deflate). The + program first checks if gzip compression is available, + then if it is it writes integers to a dataset using gzip, + then closes the file. Next, it reopens the file, reads + back the data, and outputs the type of compression and the + maximum value in the dataset to the screen. + ************************************************************/ +package examples.datasets; + +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_D_Gzip { + private static String FILENAME = "H5Ex_D_Gzip.h5"; + private static String DATASETNAME = "DS1"; + private static final int DIM_X = 32; + private static final int DIM_Y = 64; + private static final int CHUNK_X = 4; + private static final int CHUNK_Y = 8; + private static final int RANK = 2; + private static final int NDIMS = 2; + + // Values for the status of space allocation + enum H5Z_filter { + H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE( + HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32( + HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT( + HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED( + HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX); + private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>(); + + static { + for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class)) + lookup.put(s.getCode(), s); + } + + private int code; + + H5Z_filter(int layout_type) { + this.code = layout_type; + } + + public int getCode() { + return this.code; + } + + public static H5Z_filter get(int code) { + return lookup.get(code); + } + } + + private static boolean checkGzipFilter() { + try { + int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE); + if (available == 0) { + System.out.println("gzip filter not available."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE); + if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) + || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) { + System.out.println("gzip filter not available for encoding and decoding."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + return true; + } + + private static void writeGzip() { + long file_id = -1; 
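+        // Handle variables start at -1 (an invalid id) so the cleanup code further
+        // down can safely test '>= 0' before closing each resource.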
+ long filespace_id = -1; + long dataset_id = -1; + long dcpl_id = -1; + long[] dims = { DIM_X, DIM_Y }; + long[] chunk_dims = { CHUNK_X, CHUNK_Y }; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Initialize data. + for (int indx = 0; indx < DIM_X; indx++) + for (int jndx = 0; jndx < DIM_Y; jndx++) + dset_data[indx][jndx] = indx * jndx - jndx; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + filespace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset creation property list, add the gzip compression + // filter. + try { + dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE); + if (dcpl_id >= 0) { + H5.H5Pset_deflate(dcpl_id, 9); + // Set the chunk size. + H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset. + try { + if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id, + HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the data to the dataset. + try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dcpl_id >= 0) + H5.H5Pclose(dcpl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (filespace_id >= 0) + H5.H5Sclose(filespace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + private static void readGzip() { + long file_id = -1; + long dataset_id = -1; + long dcpl_id = -1; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve the dataset creation property list. + try { + if (dataset_id >= 0) + dcpl_id = H5.H5Dget_create_plist(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve and print the filter type. Here we only retrieve the + // first filter because we know that we only added one filter. 
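+        // H5Pget_filter returns the filter id and fills in the arrays passed to it:
+        // flags (filter behavior), cd_nelmts/cd_values (client data) and filter_name;
+        // the 120 below is the maximum length of the returned filter name.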
+ try { + if (dcpl_id >= 0) { + // Java lib requires a valid filter_name object and cd_values + int[] flags = { 0 }; + long[] cd_nelmts = { 1 }; + int[] cd_values = { 0 }; + String[] filter_name = { "" }; + int[] filter_config = { 0 }; + int filter_type = -1; + filter_type = H5 + .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config); + System.out.print("Filter type is: "); + switch (H5Z_filter.get(filter_type)) { + case H5Z_FILTER_DEFLATE: + System.out.println("H5Z_FILTER_DEFLATE"); + break; + case H5Z_FILTER_SHUFFLE: + System.out.println("H5Z_FILTER_SHUFFLE"); + break; + case H5Z_FILTER_FLETCHER32: + System.out.println("H5Z_FILTER_FLETCHER32"); + break; + case H5Z_FILTER_SZIP: + System.out.println("H5Z_FILTER_SZIP"); + break; + case H5Z_FILTER_NBIT: + System.out.println("H5Z_FILTER_NBIT"); + break; + case H5Z_FILTER_SCALEOFFSET: + System.out.println("H5Z_FILTER_SCALEOFFSET"); + break; + default: + System.out.println("H5Z_FILTER_ERROR"); + } + System.out.println(); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Read the data using the default properties. + try { + if (dataset_id >= 0) { + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Find the maximum value in the dataset, to verify that it was read + // correctly. + int max = dset_data[0][0]; + for (int indx = 0; indx < DIM_X; indx++) { + for (int jndx = 0; jndx < DIM_Y; jndx++) + if (max < dset_data[indx][jndx]) + max = dset_data[indx][jndx]; + } + // Print the maximum value. + System.out.println("Maximum value in " + DATASETNAME + " is: " + max); + + // End access to the dataset and release resources used by it. + try { + if (dcpl_id >= 0) + H5.H5Pclose(dcpl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + // Check if gzip compression is available and can be used for both + // compression and decompression. Normally we do not perform error + // checking in these examples for the sake of clarity, but in this + // case we will make an exception because this filter is an + // optional part of the hdf5 library. + if (H5Ex_D_Gzip.checkGzipFilter()) { + H5Ex_D_Gzip.writeGzip(); + H5Ex_D_Gzip.readGzip(); + } + } + +} diff --git a/java/examples/datasets/H5Ex_D_Hyperslab.java b/java/examples/datasets/H5Ex_D_Hyperslab.java new file mode 100644 index 0000000..482e2c0 --- /dev/null +++ b/java/examples/datasets/H5Ex_D_Hyperslab.java @@ -0,0 +1,269 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. 
It can also be found at     *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html.  If you do not have          *
+ * access to either file, you may request a copy from help@hdfgroup.org.     *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+  This example shows how to read and write data to a
+  dataset by hyperslabs.  The program first writes integers
+  in a hyperslab selection to a dataset with dataspace
+  dimensions of DIM_XxDIM_Y, then closes the file.  Next, it
+  reopens the file, reads back the data, and outputs it to
+  the screen.  Finally, it reads the data again using a
+  different hyperslab selection, and outputs the result to
+  the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Hyperslab {
+    private static String FILENAME = "H5Ex_D_Hyperslab.h5";
+    private static String DATASETNAME = "DS1";
+    private static final int DIM_X = 6;
+    private static final int DIM_Y = 8;
+    private static final int RANK = 2;
+
+    private static void writeHyperslab() {
+        long file_id = -1;
+        long filespace_id = -1;
+        long dataset_id = -1;
+        long[] dims = { DIM_X, DIM_Y };
+        int[][] dset_data = new int[DIM_X][DIM_Y];
+
+        // Initialize data to "1", to make it easier to see the selections.
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                dset_data[indx][jndx] = 1;
+
+        // Print the data to the screen.
+        System.out.println("Original Data:");
+        for (int indx = 0; indx < DIM_X; indx++) {
+            System.out.print(" [ ");
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                System.out.print(dset_data[indx][jndx] + " ");
+            System.out.println("]");
+        }
+        System.out.println();
+
+        // Create a new file using default properties.
+        try {
+            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create dataspace. Setting maximum size to NULL sets the maximum
+        // size to be the current size.
+        try {
+            filespace_id = H5.H5Screate_simple(RANK, dims, null);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the dataset. We will use all default properties for this example.
+        try {
+            if ((file_id >= 0) && (filespace_id >= 0))
+                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Define and select the first part of the hyperslab selection.
+        long[] start = { 0, 0 };
+        long[] stride = { 3, 3 };
+        long[] count = { 2, 3 };
+        long[] block = { 2, 2 };
+        try {
+            if ((filespace_id >= 0))
+                H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+        // Define and select the second part of the hyperslab selection,
+        // which is subtracted from the first selection by the use of
+        // H5S_SELECT_NOTB
+        block[0] = 1;
+        block[1] = 1;
+        try {
+            if ((filespace_id >= 0)) {
+                H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count, block);
+
+                // Write the data to the dataset.
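+                // Note: with H5S_ALL as the memory dataspace, the selection in
+                // filespace_id is also applied to the memory buffer, so only the
+                // selected elements of dset_data are transferred.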
+                if (dataset_id >= 0)
+                    H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
+                            HDF5Constants.H5P_DEFAULT, dset_data);
+            }
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (filespace_id >= 0)
+                H5.H5Sclose(filespace_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Close the file.
+        try {
+            if (file_id >= 0)
+                H5.H5Fclose(file_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    private static void readHyperslab() {
+        long file_id = -1;
+        long filespace_id = -1;
+        long dataset_id = -1;
+        long dcpl_id = -1;
+        int[][] dset_data = new int[DIM_X][DIM_Y];
+
+        // Open an existing file.
+        try {
+            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Open an existing dataset.
+        try {
+            if (file_id >= 0)
+                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Read the data using the default properties.
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, dset_data);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Output the data to the screen.
+        System.out.println("Data as written to disk by hyperslabs:");
+        for (int indx = 0; indx < DIM_X; indx++) {
+            System.out.print(" [ ");
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                System.out.print(dset_data[indx][jndx] + " ");
+            System.out.println("]");
+        }
+        System.out.println();
+
+        // Initialize the read array.
+        for (int indx = 0; indx < DIM_X; indx++)
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                dset_data[indx][jndx] = 0;
+
+        // Define and select the hyperslab to use for reading.
+        try {
+            if (dataset_id >= 0) {
+                filespace_id = H5.H5Dget_space(dataset_id);
+
+                long[] start = { 0, 1 };
+                long[] stride = { 4, 4 };
+                long[] count = { 2, 2 };
+                long[] block = { 2, 3 };
+
+                if (filespace_id >= 0) {
+                    H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+
+                    // Read the data using the previously defined hyperslab.
+                    if ((dataset_id >= 0) && (filespace_id >= 0))
+                        H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
+                                HDF5Constants.H5P_DEFAULT, dset_data);
+                }
+            }
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Output the data to the screen.
+        System.out.println("Data as read from disk by hyperslab:");
+        for (int indx = 0; indx < DIM_X; indx++) {
+            System.out.print(" [ ");
+            for (int jndx = 0; jndx < DIM_Y; jndx++)
+                System.out.print(dset_data[indx][jndx] + " ");
+            System.out.println("]");
+        }
+        System.out.println();
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pclose(dcpl_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (filespace_id >= 0)
+                H5.H5Sclose(filespace_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Close the file.
+        try {
+            if (file_id >= 0)
+                H5.H5Fclose(file_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public static void main(String[] args) {
+        H5Ex_D_Hyperslab.writeHyperslab();
+        H5Ex_D_Hyperslab.readHyperslab();
+    }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_Nbit.java b/java/examples/datasets/H5Ex_D_Nbit.java
new file mode 100644
index 0000000..f74b675
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Nbit.java
@@ -0,0 +1,305 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF5.  The full HDF5 copyright notice, including     *
+ * terms governing use, modification, and redistribution, is contained in    *
+ * the files COPYING and Copyright.html.  COPYING can be found at the root   *
+ * of the source code distribution tree; Copyright.html can be found at the  *
+ * root level of an installed copy of the electronic HDF5 document set and   *
+ * is linked from the top-level documents page.  It can also be found at     *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html.  If you do not have          *
+ * access to either file, you may request a copy from help@hdfgroup.org.     *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+  This example shows how to read and write data to a dataset
+  using the N-Bit filter.  The program first checks if the
+  N-Bit filter is available, then, if it is, writes integers
+  to a dataset using N-Bit, then closes the file.  Next, it
+  reopens the file, reads back the data, and outputs the type
+  of filter and the maximum value in the dataset to the screen.
+ ************************************************************/ + +package examples.datasets; + +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_D_Nbit { + private static String FILENAME = "H5Ex_D_Nbit.h5"; + private static String DATASETNAME = "DS1"; + private static final int DIM_X = 32; + private static final int DIM_Y = 64; + private static final int CHUNK_X = 4; + private static final int CHUNK_Y = 8; + private static final int RANK = 2; + private static final int NDIMS = 2; + + // Values for the status of space allocation + enum H5Z_filter { + H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE( + HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32( + HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT( + HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED( + HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX); + private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>(); + + static { + for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class)) + lookup.put(s.getCode(), s); + } + + private int code; + + H5Z_filter(int layout_type) { + this.code = layout_type; + } + + public int getCode() { + return this.code; + } + + public static H5Z_filter get(int code) { + return lookup.get(code); + } + } + + private static boolean checkNbitFilter() { + try { + // Check if N-Bit compression is available and can be used for both compression and decompression. + int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_NBIT); + if (available == 0) { + System.out.println("N-Bit filter not available."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_NBIT); + if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) + || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) { + System.out.println("N-Bit filter not available for encoding and decoding."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + return true; + } + + private static void writeData() throws Exception { + long file_id = -1; + long filespace_id = -1; + long dataset_id = -1; + long dtype_id = -1; + long dcpl_id = -1; + long[] dims = { DIM_X, DIM_Y }; + long[] chunk_dims = { CHUNK_X, CHUNK_Y }; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Initialize data. + for (int indx = 0; indx < DIM_X; indx++) + for (int jndx = 0; jndx < DIM_Y; jndx++) + dset_data[indx][jndx] = indx * jndx - jndx; + + try { + // Create a new file using the default properties. + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + filespace_id = H5.H5Screate_simple(RANK, dims, null); + + // Create the datatype to use with the N-Bit filter. It has an uncompressed size of 32 bits, + // but will have a size of 16 bits after being packed by the N-Bit filter. 
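+            // With precision 16 and offset 5, bits 5 through 20 of each 32-bit
+            // value are kept and the remaining bits are discarded by the filter.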
+ dtype_id = H5.H5Tcopy(HDF5Constants.H5T_STD_I32LE); + H5.H5Tset_precision(dtype_id, 16); + H5.H5Tset_offset(dtype_id, 5); + + // Create the dataset creation property list, add the N-Bit filter and set the chunk size. + dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE); + H5.H5Pset_nbit(dcpl_id); + H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims); + + // Create the dataset. + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, dtype_id, filespace_id, HDF5Constants.H5P_DEFAULT, dcpl_id, + HDF5Constants.H5P_DEFAULT); + + // Write the data to the dataset. + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + finally { + // Close and release resources. + if (dcpl_id >= 0) + H5.H5Pclose(dcpl_id); + if (dtype_id >= 0) + H5.H5Tclose(dtype_id); + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + if (filespace_id >= 0) + H5.H5Sclose(filespace_id); + if (file_id >= 0) + H5.H5Fclose(file_id); + } + } + + private static void readData() throws Exception { + long file_id = -1; + long dataset_id = -1; + long dcpl_id = -1; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve the dataset creation property list. + try { + if (dataset_id >= 0) + dcpl_id = H5.H5Dget_create_plist(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve and print the filter type. Here we only retrieve the + // first filter because we know that we only added one filter. + try { + if (dcpl_id >= 0) { + // Java lib requires a valid filter_name object and cd_values + int[] flags = { 0 }; + long[] cd_nelmts = { 1 }; + int[] cd_values = { 0 }; + String[] filter_name = { "" }; + int[] filter_config = { 0 }; + int filter_type = -1; + filter_type = H5 + .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config); + System.out.print("Filter type is: "); + switch (H5Z_filter.get(filter_type)) { + case H5Z_FILTER_DEFLATE: + System.out.println("H5Z_FILTER_DEFLATE"); + break; + case H5Z_FILTER_SHUFFLE: + System.out.println("H5Z_FILTER_SHUFFLE"); + break; + case H5Z_FILTER_FLETCHER32: + System.out.println("H5Z_FILTER_FLETCHER32"); + break; + case H5Z_FILTER_SZIP: + System.out.println("H5Z_FILTER_SZIP"); + break; + case H5Z_FILTER_NBIT: + System.out.println("H5Z_FILTER_NBIT"); + break; + case H5Z_FILTER_SCALEOFFSET: + System.out.println("H5Z_FILTER_SCALEOFFSET"); + break; + default: + System.out.println("H5Z_FILTER_ERROR"); + } + System.out.println(); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Read the data using the default properties. + try { + if (dataset_id >= 0) { + int status = H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, + HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data); + // Check if the read was successful. + if (status < 0) + System.out.print("Dataset read failed!"); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Find the maximum value in the dataset, to verify that it was read + // correctly. 
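+        // Because the stored datatype keeps only 16 of the 32 bits (see
+        // writeData), values read back may differ from those originally written.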
+ int max = dset_data[0][0]; + for (int indx = 0; indx < DIM_X; indx++) { + for (int jndx = 0; jndx < DIM_Y; jndx++) + if (max < dset_data[indx][jndx]) + max = dset_data[indx][jndx]; + } + // Print the maximum value. + System.out.println("Maximum value in " + DATASETNAME + " is: " + max); + + // End access to the dataset and release resources used by it. + try { + if (dcpl_id >= 0) + H5.H5Pclose(dcpl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + /* + * Check if N-Bit compression is available and can be used for both compression and decompression. Normally we + * do not perform error checking in these examples for the sake of clarity, but in this case we will make an + * exception because this filter is an optional part of the hdf5 library. + */ + try { + if (H5Ex_D_Nbit.checkNbitFilter()) { + H5Ex_D_Nbit.writeData(); + H5Ex_D_Nbit.readData(); + } + } + catch (Exception ex) { + ex.printStackTrace(); + } + } +} diff --git a/java/examples/datasets/H5Ex_D_ReadWrite.java b/java/examples/datasets/H5Ex_D_ReadWrite.java new file mode 100644 index 0000000..de94ccb --- /dev/null +++ b/java/examples/datasets/H5Ex_D_ReadWrite.java @@ -0,0 +1,179 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + + This example shows how to read and write data to a + dataset. The program first writes integers to a dataset + with dataspace dimensions of DIM_XxDIM_Y, then closes the + file. Next, it reopens the file, reads back the data, and + outputs it to the screen. + ************************************************************/ +package examples.datasets; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_D_ReadWrite { + private static String FILENAME = "H5Ex_D_ReadWrite.h5"; + private static String DATASETNAME = "DS1"; + private static final int DIM_X = 4; + private static final int DIM_Y = 7; + private static final int RANK = 2; + + private static void WriteDataset() { + long file_id = -1; + long filespace_id = -1; + long dataset_id = -1; + long[] dims = { DIM_X, DIM_Y }; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Initialize data. + for (int indx = 0; indx < DIM_X; indx++) + for (int jndx = 0; jndx < DIM_Y; jndx++) + dset_data[indx][jndx] = indx * jndx - jndx; + + // Create a new file using default properties. 
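+        // H5F_ACC_TRUNC overwrites the file if it already exists.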
+ try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + filespace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset. We will use all default properties for this example. + try { + if ((file_id >= 0) && (filespace_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the data to the dataset. + try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (filespace_id >= 0) + H5.H5Sclose(filespace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + private static void ReadDataset() { + long file_id = -1; + long dataset_id = -1; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Open file using the default properties. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open dataset using the default properties. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Read the data using the default properties. + try { + if (dataset_id >= 0) + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + System.out.println(DATASETNAME + ":"); + for (int indx = 0; indx < DIM_X; indx++) { + System.out.print(" [ "); + for (int jndx = 0; jndx < DIM_Y; jndx++) + System.out.print(dset_data[indx][jndx] + " "); + System.out.println("]"); + } + System.out.println(); + + // Close the dataset. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + H5Ex_D_ReadWrite.WriteDataset(); + H5Ex_D_ReadWrite.ReadDataset(); + } + +} diff --git a/java/examples/datasets/H5Ex_D_Shuffle.java b/java/examples/datasets/H5Ex_D_Shuffle.java new file mode 100644 index 0000000..ac3c1b4 --- /dev/null +++ b/java/examples/datasets/H5Ex_D_Shuffle.java @@ -0,0 +1,373 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. 
The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write data to a dataset + using the shuffle filter with gzip compression. The + program first checks if the shuffle and gzip filters are + available, then if they are it writes integers to a + dataset using shuffle+gzip, then closes the file. Next, + it reopens the file, reads back the data, and outputs the + types of filters and the maximum value in the dataset to + the screen. + ************************************************************/ +package examples.datasets; + +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_D_Shuffle { + private static String FILENAME = "H5Ex_D_Shuffle.h5"; + private static String DATASETNAME = "DS1"; + private static final int DIM_X = 32; + private static final int DIM_Y = 64; + private static final int CHUNK_X = 4; + private static final int CHUNK_Y = 8; + private static final int RANK = 2; + private static final int NDIMS = 2; + + // Values for the status of space allocation + enum H5Z_filter { + H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE( + HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32( + HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT( + HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED( + HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX); + private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>(); + + static { + for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class)) + lookup.put(s.getCode(), s); + } + + private int code; + + H5Z_filter(int layout_type) { + this.code = layout_type; + } + + public int getCode() { + return this.code; + } + + public static H5Z_filter get(int code) { + return lookup.get(code); + } + } + + private static boolean checkGzipFilter() { + try { + int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE); + if (available == 0) { + System.out.println("gzip filter not available."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE); + if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) + || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) { + System.out.println("gzip filter not available for encoding and decoding."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + return true; + } + + private static boolean checkShuffleFilter() { + try { + int available = 
H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SHUFFLE); + if (available == 0) { + System.out.println("Shuffle filter not available."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SHUFFLE); + if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) + || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) { + System.out.println("Shuffle filter not available for encoding and decoding."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + return true; + } + + private static void writeShuffle() { + long file_id = -1; + long filespace_id = -1; + long dataset_id = -1; + long dcpl_id = -1; + long[] dims = { DIM_X, DIM_Y }; + long[] chunk_dims = { CHUNK_X, CHUNK_Y }; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Initialize data. + for (int indx = 0; indx < DIM_X; indx++) + for (int jndx = 0; jndx < DIM_Y; jndx++) + dset_data[indx][jndx] = indx * jndx - jndx; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + filespace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset creation property list, add the shuffle + // filter and the gzip compression filter. + // The order in which the filters are added here is significant - + // we will see much greater results when the shuffle is applied + // first. The order in which the filters are added to the property + // list is the order in which they will be invoked when writing + // data. + try { + dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE); + if (dcpl_id >= 0) { + H5.H5Pset_shuffle(dcpl_id); + H5.H5Pset_deflate(dcpl_id, 9); + // Set the chunk size. + H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset. + try { + if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id, + HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the data to the dataset. + try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dcpl_id >= 0) + H5.H5Pclose(dcpl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (filespace_id >= 0) + H5.H5Sclose(filespace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + private static void readShuffle() { + long file_id = -1; + long dataset_id = -1; + long dcpl_id = -1; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Open an existing file. 
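+        // No special setup is needed to decompress: the filter pipeline is
+        // recorded in the dataset creation property list and is applied
+        // transparently when the data is read back.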
+ try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve the dataset creation property list. + try { + if (dataset_id >= 0) + dcpl_id = H5.H5Dget_create_plist(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve the number of filters, and retrieve and print the + // type of each. + try { + if (dcpl_id >= 0) { + int nfilters = H5.H5Pget_nfilters(dcpl_id); + for (int indx = 0; indx < nfilters; indx++) { + // Java lib requires a valid filter_name object and cd_values + int[] flags = { 0 }; + long[] cd_nelmts = { 1 }; + int[] cd_values = { 0 }; + String[] filter_name = { "" }; + int[] filter_config = { 0 }; + int filter_type = -1; + filter_type = H5.H5Pget_filter(dcpl_id, indx, flags, cd_nelmts, cd_values, 120, filter_name, + filter_config); + System.out.print("Filter " + indx + ": Type is: "); + switch (H5Z_filter.get(filter_type)) { + case H5Z_FILTER_DEFLATE: + System.out.println("H5Z_FILTER_DEFLATE"); + break; + case H5Z_FILTER_SHUFFLE: + System.out.println("H5Z_FILTER_SHUFFLE"); + break; + case H5Z_FILTER_FLETCHER32: + System.out.println("H5Z_FILTER_FLETCHER32"); + break; + case H5Z_FILTER_SZIP: + System.out.println("H5Z_FILTER_SZIP"); + break; + case H5Z_FILTER_NBIT: + System.out.println("H5Z_FILTER_NBIT"); + break; + case H5Z_FILTER_SCALEOFFSET: + System.out.println("H5Z_FILTER_SCALEOFFSET"); + break; + default: + System.out.println("H5Z_FILTER_ERROR"); + } + System.out.println(); + } + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Read the data using the default properties. + try { + if (dataset_id >= 0) { + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Find the maximum value in the dataset, to verify that it was read + // correctly. + int max = dset_data[0][0]; + for (int indx = 0; indx < DIM_X; indx++) { + for (int jndx = 0; jndx < DIM_Y; jndx++) + if (max < dset_data[indx][jndx]) + max = dset_data[indx][jndx]; + } + // Print the maximum value. + System.out.println("Maximum value in " + DATASETNAME + " is: " + max); + + // End access to the dataset and release resources used by it. + try { + if (dcpl_id >= 0) + H5.H5Pclose(dcpl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + // Check if gzip compression is available and can be used for both + // compression and decompression. Normally we do not perform error + // checking in these examples for the sake of clarity, but in this + // case we will make an exception because this filter is an + // optional part of the hdf5 library. + // Similarly, check for availability of the shuffle filter. 
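+        // The shuffle filter does not compress by itself; it reorders the bytes
+        // of each element so that gzip sees long runs of similar bytes, which
+        // typically improves the compression ratio.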
+        if (H5Ex_D_Shuffle.checkGzipFilter() && H5Ex_D_Shuffle.checkShuffleFilter()) {
+            H5Ex_D_Shuffle.writeShuffle();
+            H5Ex_D_Shuffle.readShuffle();
+        }
+    }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_Sofloat.java b/java/examples/datasets/H5Ex_D_Sofloat.java
new file mode 100644
index 0000000..26c8d49
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Sofloat.java
@@ -0,0 +1,356 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF5.  The full HDF5 copyright notice, including     *
+ * terms governing use, modification, and redistribution, is contained in    *
+ * the files COPYING and Copyright.html.  COPYING can be found at the root   *
+ * of the source code distribution tree; Copyright.html can be found at the  *
+ * root level of an installed copy of the electronic HDF5 document set and   *
+ * is linked from the top-level documents page.  It can also be found at     *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html.  If you do not have          *
+ * access to either file, you may request a copy from help@hdfgroup.org.     *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+  This example shows how to read and write data to a dataset
+  using the Scale-Offset filter.  The program first checks
+  if the Scale-Offset filter is available, then, if it is,
+  writes floating point numbers to a dataset using
+  Scale-Offset, then closes the file.  Next, it reopens the
+  file, reads back the data, and outputs the type of filter
+  and the maximum value in the dataset to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Sofloat {
+
+    private static String FILENAME = "H5Ex_D_Sofloat.h5";
+    private static String DATASETNAME = "DS1";
+    private static final int DIM_X = 32;
+    private static final int DIM_Y = 64;
+    private static final int CHUNK_X = 4;
+    private static final int CHUNK_Y = 8;
+    private static final int RANK = 2;
+    private static final int NDIMS = 2;
+
+    // Values identifying the filter type returned by H5Pget_filter.
+    enum H5Z_filter {
+        H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE(
+                HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32(
+                HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT(
+                HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED(
+                HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+        private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+        static {
+            for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+                lookup.put(s.getCode(), s);
+        }
+
+        private int code;
+
+        H5Z_filter(int layout_type) {
+            this.code = layout_type;
+        }
+
+        public int getCode() {
+            return this.code;
+        }
+
+        public static H5Z_filter get(int code) {
+            return lookup.get(code);
+        }
+    }
+
+    private static boolean checkScaleoffsetFilter() {
+        try {
+            int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+            if (available == 0) {
System.out.println("Scale-Offset filter not available."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET); + if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) + || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) { + System.out.println("Scale-Offset filter not available for encoding and decoding."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + return true; + } + + private static void writeData() { + long file_id = -1; + long filespace_id = -1; + long dataset_id = -1; + long dcpl_id = -1; + long[] dims = { DIM_X, DIM_Y }; + long[] chunk_dims = { CHUNK_X, CHUNK_Y }; + double[][] dset_data = new double[DIM_X][DIM_Y]; + + // Initialize data. + for (int indx = 0; indx < DIM_X; indx++) + for (int jndx = 0; jndx < DIM_Y; jndx++) { + double x = indx; + double y = jndx; + dset_data[indx][jndx] = (x + 1) / (y + 0.3) + y; + } + + // Find the maximum value in the dataset, to verify that it was read correctly. + double max = dset_data[0][0]; + double min = dset_data[0][0]; + for (int indx = 0; indx < DIM_X; indx++) + for (int jndx = 0; jndx < DIM_Y; jndx++) { + if (max < dset_data[indx][jndx]) + max = dset_data[indx][jndx]; + if (min > dset_data[indx][jndx]) + min = dset_data[indx][jndx]; + } + + // Print the maximum value. + System.out.println("Maximum value in write buffer is: " + max); + System.out.println("Minimum value in write buffer is: " + min); + + // Create a new file using the default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum size to be the current size. + try { + filespace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset creation property list, add the Scale-Offset + // filter and set the chunk size. + try { + dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE); + if (dcpl_id >= 0) { + H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_FLOAT_DSCALE, 2); + H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset. + try { + if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_IEEE_F64LE, filespace_id, + HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the data to the dataset. + try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close and release resources. 
+ try { + if (dcpl_id >= 0) + H5.H5Pclose(dcpl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (filespace_id >= 0) + H5.H5Sclose(filespace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close file + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + private static void readData() { + long file_id = -1; + long dataset_id = -1; + long dcpl_id = -1; + double[][] dset_data = new double[DIM_X][DIM_Y]; + + // Open file using the default properties. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + // Open dataset using the default properties. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve dataset creation property list. + try { + if (dataset_id >= 0) + dcpl_id = H5.H5Dget_create_plist(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve and print the filter type. Here we only retrieve the + // first filter because we know that we only added one filter. + try { + if (dcpl_id >= 0) { + // Java lib requires a valid filter_name object and cd_values + int[] flags = { 0 }; + long[] cd_nelmts = { 1 }; + int[] cd_values = { 0 }; + String[] filter_name = { "" }; + int[] filter_config = { 0 }; + int filter_type = -1; + + filter_type = H5 + .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config); + System.out.print("Filter type is: "); + switch (H5Z_filter.get(filter_type)) { + case H5Z_FILTER_DEFLATE: + System.out.println("H5Z_FILTER_DEFLATE"); + break; + case H5Z_FILTER_SHUFFLE: + System.out.println("H5Z_FILTER_SHUFFLE"); + break; + case H5Z_FILTER_FLETCHER32: + System.out.println("H5Z_FILTER_FLETCHER32"); + break; + case H5Z_FILTER_SZIP: + System.out.println("H5Z_FILTER_SZIP"); + break; + case H5Z_FILTER_NBIT: + System.out.println("H5Z_FILTER_NBIT"); + break; + case H5Z_FILTER_SCALEOFFSET: + System.out.println("H5Z_FILTER_SCALEOFFSET"); + break; + default: + System.out.println("H5Z_FILTER_ERROR"); + } + System.out.println(); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Read the data using the default properties. + try { + if (dataset_id >= 0) + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Find the maximum value in the dataset, to verify that it was read correctly. + double max = dset_data[0][0]; + double min = dset_data[0][0]; + for (int indx = 0; indx < DIM_X; indx++) + for (int jndx = 0; jndx < DIM_Y; jndx++) { + if (max < dset_data[indx][jndx]) + max = dset_data[indx][jndx]; + if (min > dset_data[indx][jndx]) + min = dset_data[indx][jndx]; + } + + // Print the maximum value. + System.out.println("Maximum value in " + DATASETNAME + " is: " + max); + System.out.println("Minimum value in " + DATASETNAME + " is: " + min); + + // End access to the dataset and release resources used by it. 
+        try {
+            if (dcpl_id >= 0)
+                H5.H5Pclose(dcpl_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Close the file.
+        try {
+            if (file_id >= 0)
+                H5.H5Fclose(file_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public static void main(String[] args) {
+
+        // Check if Scale-Offset compression is available and can be used
+        // for both compression and decompression. Normally we do not
+        // perform error checking in these examples for the sake of
+        // clarity, but in this case we will make an exception because this
+        // filter is an optional part of the hdf5 library.
+        if (H5Ex_D_Sofloat.checkScaleoffsetFilter()) {
+            H5Ex_D_Sofloat.writeData();
+            H5Ex_D_Sofloat.readData();
+        }
+    }
+}
diff --git a/java/examples/datasets/H5Ex_D_Soint.java b/java/examples/datasets/H5Ex_D_Soint.java
new file mode 100644
index 0000000..7939883
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Soint.java
@@ -0,0 +1,335 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF5.  The full HDF5 copyright notice, including     *
+ * terms governing use, modification, and redistribution, is contained in    *
+ * the files COPYING and Copyright.html.  COPYING can be found at the root   *
+ * of the source code distribution tree; Copyright.html can be found at the  *
+ * root level of an installed copy of the electronic HDF5 document set and   *
+ * is linked from the top-level documents page.  It can also be found at     *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html.  If you do not have          *
+ * access to either file, you may request a copy from help@hdfgroup.org.     *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+  This example shows how to read and write data to a dataset
+  using the Scale-Offset filter.  The program first checks
+  if the Scale-Offset filter is available, then, if it is,
+  writes integers to a dataset using Scale-Offset, then
+  closes the file.  Next, it reopens the file, reads back the
+  data, and outputs the type of filter and the maximum value
+  in the dataset to the screen.
+ ************************************************************/ +package examples.datasets; + +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_D_Soint { + + private static String FILENAME = "H5Ex_D_Soint.h5"; + private static String DATASETNAME = "DS1"; + private static final int DIM_X = 32; + private static final int DIM_Y = 64; + private static final int CHUNK_X = 4; + private static final int CHUNK_Y = 8; + private static final int RANK = 2; + private static final int NDIMS = 2; + + // Values for the status of space allocation + enum H5Z_filter { + H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE( + HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32( + HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT( + HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED( + HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX); + private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>(); + + static { + for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class)) + lookup.put(s.getCode(), s); + } + + private int code; + + H5Z_filter(int layout_type) { + this.code = layout_type; + } + + public int getCode() { + return this.code; + } + + public static H5Z_filter get(int code) { + return lookup.get(code); + } + } + + private static boolean checkScaleoffsetFilter() { + try { + int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET); + if (available == 0) { + System.out.println("Scale-Offset filter not available."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET); + if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) + || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) { + System.out.println("Scale-Offset filter not available for encoding and decoding."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + return true; + } + + private static void writeData() { + long file_id = -1; + long filespace_id = -1; + long dataset_id = -1; + long dcpl_id = -1; + long[] dims = { DIM_X, DIM_Y }; + long[] chunk_dims = { CHUNK_X, CHUNK_Y }; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Initialize data. + for (int indx = 0; indx < DIM_X; indx++) + for (int jndx = 0; jndx < DIM_Y; jndx++) + dset_data[indx][jndx] = indx * jndx - jndx; + + // Create a new file using the default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum size to be the current size. + try { + filespace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset creation property list, add the Scale-Offset + // filter and set the chunk size. 
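+        // H5Z_SO_INT_MINBITS_DEFAULT lets the filter compute the minimum number
+        // of bits needed from the actual range of the data, so integer
+        // scale-offset compression remains lossless.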
+ try { + dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE); + if (dcpl_id >= 0) { + H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_INT, HDF5Constants.H5Z_SO_INT_MINBITS_DEFAULT); + H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset. + try { + if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id, + HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the data to the dataset. + try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close and release resources. + try { + if (dcpl_id >= 0) + H5.H5Pclose(dcpl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (filespace_id >= 0) + H5.H5Sclose(filespace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close file + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + private static void readData() { + long file_id = -1; + long dataset_id = -1; + long dcpl_id = -1; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Open file using the default properties. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + // Open dataset using the default properties. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve dataset creation property list. + try { + if (dataset_id >= 0) + dcpl_id = H5.H5Dget_create_plist(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve and print the filter type. Here we only retrieve the + // first filter because we know that we only added one filter. + try { + if (dcpl_id >= 0) { + // Java lib requires a valid filter_name object and cd_values + int[] flags = { 0 }; + long[] cd_nelmts = { 1 }; + int[] cd_values = { 0 }; + String[] filter_name = { "" }; + int[] filter_config = { 0 }; + int filter_type = -1; + + filter_type = H5 + .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config); + System.out.print("Filter type is: "); + switch (H5Z_filter.get(filter_type)) { + case H5Z_FILTER_DEFLATE: + System.out.println("H5Z_FILTER_DEFLATE"); + break; + case H5Z_FILTER_SHUFFLE: + System.out.println("H5Z_FILTER_SHUFFLE"); + break; + case H5Z_FILTER_FLETCHER32: + System.out.println("H5Z_FILTER_FLETCHER32"); + break; + case H5Z_FILTER_SZIP: + System.out.println("H5Z_FILTER_SZIP"); + break; + case H5Z_FILTER_NBIT: + System.out.println("H5Z_FILTER_NBIT"); + break; + case H5Z_FILTER_SCALEOFFSET: + System.out.println("H5Z_FILTER_SCALEOFFSET"); + break; + default: + System.out.println("H5Z_FILTER_ERROR"); + } + System.out.println(); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Read the data using the default properties. 
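+ // (No filter-specific settings are needed here: the Scale-Offset
+ // decode step runs transparently inside H5Dread as part of the
+ // filter pipeline.)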
+ try { + if (dataset_id >= 0) + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Find the maximum value in the dataset, to verify that it was read correctly. + int max = dset_data[0][0]; + for (int indx = 0; indx < DIM_X; indx++) + for (int jndx = 0; jndx < DIM_Y; jndx++) { + if (max < dset_data[indx][jndx]) + max = dset_data[indx][jndx]; + } + + // Print the maximum value. + System.out.println("Maximum value in " + DATASETNAME + " is: " + max); + + // End access to the dataset and release resources used by it. + try { + if (dcpl_id >= 0) + H5.H5Pclose(dcpl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + + // Check if Scale-Offset compression is available and can be used + // for both compression and decompression. Normally we do not + // perform error checking in these examples for the sake of + // clarity, but in this case we will make an exception because this + // filter is an optional part of the hdf5 library. + if (H5Ex_D_Soint.checkScaleoffsetFilter()) { + H5Ex_D_Soint.writeData(); + H5Ex_D_Soint.readData(); + } + } + +} diff --git a/java/examples/datasets/H5Ex_D_Szip.java b/java/examples/datasets/H5Ex_D_Szip.java new file mode 100644 index 0000000..5258234 --- /dev/null +++ b/java/examples/datasets/H5Ex_D_Szip.java @@ -0,0 +1,337 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write data to a dataset + using szip compression. The program first checks if + szip compression is available, then if it is it writes + integers to a dataset using szip, then closes the file. + Next, it reopens the file, reads back the data, and + outputs the type of compression and the maximum value in + the dataset to the screen. 
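+
+ In outline, the compression is requested through the dataset
+ creation property list; a minimal sketch (error handling
+ omitted; NDIMS and chunk_dims as defined below) using the same
+ NN coding mode and block size of 8 as the program below:
+
+ long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ H5.H5Pset_szip(dcpl, HDF5Constants.H5_SZIP_NN_OPTION_MASK, 8);
+ H5.H5Pset_chunk(dcpl, NDIMS, chunk_dims);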
+ ************************************************************/ +package examples.datasets; + +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_D_Szip { + private static String FILENAME = "H5Ex_D_Szip.h5"; + private static String DATASETNAME = "DS1"; + private static final int DIM_X = 32; + private static final int DIM_Y = 64; + private static final int CHUNK_X = 4; + private static final int CHUNK_Y = 8; + private static final int RANK = 2; + private static final int NDIMS = 2; + + // Values for the status of space allocation + enum H5Z_filter { + H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE( + HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32( + HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT( + HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED( + HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX); + private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>(); + + static { + for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class)) + lookup.put(s.getCode(), s); + } + + private int code; + + H5Z_filter(int layout_type) { + this.code = layout_type; + } + + public int getCode() { + return this.code; + } + + public static H5Z_filter get(int code) { + return lookup.get(code); + } + } + + private static boolean checkSzipFilter() { + try { + int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SZIP); + if (available == 0) { + System.out.println("szip filter not available."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SZIP); + if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) + || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) { + System.out.println("szip filter not available for encoding and decoding."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + return true; + } + + private static void writeSzip() { + long file_id = -1; + long filespace_id = -1; + long dataset_id = -1; + long dcpl_id = -1; + long[] dims = { DIM_X, DIM_Y }; + long[] chunk_dims = { CHUNK_X, CHUNK_Y }; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Initialize data. + for (int indx = 0; indx < DIM_X; indx++) + for (int jndx = 0; jndx < DIM_Y; jndx++) + dset_data[indx][jndx] = indx * jndx - jndx; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + filespace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset creation property list, add the szip compression + // filter. + try { + dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE); + if (dcpl_id >= 0) { + H5.H5Pset_szip(dcpl_id, HDF5Constants.H5_SZIP_NN_OPTION_MASK, 8); + // Set the chunk size. 
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset. + try { + if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id, + HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the data to the dataset. + try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dcpl_id >= 0) + H5.H5Pclose(dcpl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (filespace_id >= 0) + H5.H5Sclose(filespace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + private static void readSzip() { + long file_id = -1; + long dataset_id = -1; + long dcpl_id = -1; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve the dataset creation property list. + try { + if (dataset_id >= 0) + dcpl_id = H5.H5Dget_create_plist(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve and print the filter type. Here we only retrieve the + // first filter because we know that we only added one filter. + try { + if (dcpl_id >= 0) { + // Java lib requires a valid filter_name object and cd_values + int[] flags = { 0 }; + long[] cd_nelmts = { 1 }; + int[] cd_values = { 0 }; + String[] filter_name = { "" }; + int[] filter_config = { 0 }; + int filter_type = -1; + + filter_type = H5 + .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config); + System.out.print("Filter type is: "); + switch (H5Z_filter.get(filter_type)) { + case H5Z_FILTER_DEFLATE: + System.out.println("H5Z_FILTER_DEFLATE"); + break; + case H5Z_FILTER_SHUFFLE: + System.out.println("H5Z_FILTER_SHUFFLE"); + break; + case H5Z_FILTER_FLETCHER32: + System.out.println("H5Z_FILTER_FLETCHER32"); + break; + case H5Z_FILTER_SZIP: + System.out.println("H5Z_FILTER_SZIP"); + break; + case H5Z_FILTER_NBIT: + System.out.println("H5Z_FILTER_NBIT"); + break; + case H5Z_FILTER_SCALEOFFSET: + System.out.println("H5Z_FILTER_SCALEOFFSET"); + break; + default: + System.out.println("H5Z_FILTER_ERROR"); + } + System.out.println(); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Read the data using the default properties. + try { + if (dataset_id >= 0) { + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Find the maximum value in the dataset, to verify that it was read + // correctly. 
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ // Check if Szip compression is available and can be used for both
+ // compression and decompression. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+ // optional part of the HDF5 library.
+ if (H5Ex_D_Szip.checkSzipFilter()) {
+ H5Ex_D_Szip.writeSzip();
+ H5Ex_D_Szip.readSzip();
+ }
+ }
+
+} diff --git a/java/examples/datasets/H5Ex_D_Transform.java b/java/examples/datasets/H5Ex_D_Transform.java new file mode 100644 index 0000000..1f289f3 --- /dev/null +++ b/java/examples/datasets/H5Ex_D_Transform.java @@ -0,0 +1,250 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using a data transform expression. The program first
+ writes integers to a dataset using the transform
+ expression TRANSFORM, then closes the file. Next, it
+ reopens the file, reads back the data without a transform,
+ and outputs the data to the screen. Finally, it reads the
+ data using the transform expression RTRANSFORM and outputs
+ the results to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Transform {
+
+ private static String FILE = "H5Ex_D_Transform.h5";
+ private static String DATASET = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static String TRANSFORM = "x+1";
+ private static String RTRANSFORM = "x-1";
+
+ private static void writeData() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dxpl_id = -1;
+
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
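+ // (The transform itself is attached later through a dataset transfer
+ // property list; in sketch form, the calls this method builds up to:
+ // long dxpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ // H5.H5Pset_data_transform(dxpl, TRANSFORM);
+ // Passing dxpl to H5Dwrite then applies "x+1" to each element as
+ // it is written; readData() below uses RTRANSFORM to undo it.)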
+ for (int i = 0; i < DIM_X; i++) + for (int j = 0; j < DIM_Y; j++) + dset_data[i][j] = i * j - j; + + // Output the data to the screen. + System.out.println("Original Data:"); + for (int i = 0; i < DIM_X; i++) { + System.out.print(" ["); + for (int j = 0; j < DIM_Y; j++) + System.out.print(" " + dset_data[i][j] + " "); + System.out.println("]"); + } + + // Create a new file using the default properties. + try { + file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + filespace_id = H5.H5Screate_simple(2, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset transfer property list and define the transform expression. + try { + dxpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER); + if (dxpl_id >= 0) + H5.H5Pset_data_transform(dxpl_id, TRANSFORM); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset using the default properties. Unfortunately we must save as + // a native type or the transform operation will fail. + try { + if ((file_id >= 0) && (filespace_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASET, HDF5Constants.H5T_NATIVE_INT, filespace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the data to the dataset using the dataset transfer property list. + try { + if ((dataset_id >= 0) && (dxpl_id >= 0)) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + dxpl_id, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dxpl_id >= 0) + H5.H5Pclose(dxpl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (filespace_id >= 0) + H5.H5Sclose(filespace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + private static void readData() { + + long file_id = -1; + long dataset_id = -1; + long dxpl_id = -1; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Open an existing file using the default properties. + try { + file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset using the default properties. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASET, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Read the data using the default properties. + try { + if (dataset_id >= 0) + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. 
+ System.out.println("Data as written with transform '" + TRANSFORM + "'"); + for (int i = 0; i < DIM_X; i++) { + System.out.print(" ["); + for (int j = 0; j < DIM_Y; j++) + System.out.print(" " + dset_data[i][j] + " "); + System.out.println("]"); + } + + // Create the dataset transfer property list and define the transform expression. + try { + dxpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER); + if (dxpl_id >= 0) + H5.H5Pset_data_transform(dxpl_id, RTRANSFORM); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Read the data using the dataset transfer property list. + try { + if ((dataset_id >= 0) && (dxpl_id >= 0)) + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + dxpl_id, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + + System.out.println("Data as written with transform '" + TRANSFORM + "' and read with transform '" + + RTRANSFORM + "'"); + for (int i = 0; i < DIM_X; i++) { + System.out.print(" ["); + for (int j = 0; j < DIM_Y; j++) + System.out.print(" " + dset_data[i][j] + " "); + System.out.println("]"); + } + + // Close and release resources. + try { + if (dxpl_id >= 0) + H5.H5Pclose(dxpl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + H5Ex_D_Transform.writeData(); + H5Ex_D_Transform.readData(); + } + +} diff --git a/java/examples/datasets/H5Ex_D_UnlimitedAdd.java b/java/examples/datasets/H5Ex_D_UnlimitedAdd.java new file mode 100644 index 0000000..ada8df0 --- /dev/null +++ b/java/examples/datasets/H5Ex_D_UnlimitedAdd.java @@ -0,0 +1,393 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to create and extend an unlimited + dataset. The program first writes integers to a dataset + with dataspace dimensions of DIM_XxDIM_Y, then closes the + file. Next, it reopens the file, reads back the data, + outputs it to the screen, extends the dataset, and writes + new data to the extended portions of the dataset. Finally + it reopens the file again, reads back the data, and + outputs it to the screen. 
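+
+ Two things make the dataset extendible: the dataspace is
+ created with unlimited maximum dimensions, and the dataset
+ uses a chunked layout. A minimal sketch of the calls used
+ below (error handling omitted):
+
+ long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+ long space = H5.H5Screate_simple(RANK, dims, maxdims);
+ long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ H5.H5Pset_chunk(dcpl, NDIMS, chunk_dims);
+ // later, to grow the dataset in place:
+ H5.H5Dset_extent(dataset_id, extdims);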
+ ************************************************************/ +package examples.datasets; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_D_UnlimitedAdd { + private static String FILENAME = "H5Ex_D_UnlimitedAdd.h5"; + private static String DATASETNAME = "DS1"; + private static final int DIM_X = 4; + private static final int DIM_Y = 7; + private static final int EDIM_X = 6; + private static final int EDIM_Y = 10; + private static final int CHUNK_X = 4; + private static final int CHUNK_Y = 4; + private static final int RANK = 2; + private static final int NDIMS = 2; + + private static void writeUnlimited() { + long file_id = -1; + long dcpl_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM_X, DIM_Y }; + long[] chunk_dims = { CHUNK_X, CHUNK_Y }; + long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED }; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Initialize the dataset. + for (int indx = 0; indx < DIM_X; indx++) + for (int jndx = 0; jndx < DIM_Y; jndx++) + dset_data[indx][jndx] = indx * jndx - jndx; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace with unlimited dimensions. + try { + dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset creation property list. + try { + dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Set the chunk size. + try { + if (dcpl_id >= 0) + H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the unlimited dataset. + try { + if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id, + HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the data to the dataset. + try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dcpl_id >= 0) + H5.H5Pclose(dcpl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + private static void extendUnlimited() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM_X, DIM_Y }; + long[] extdims = { EDIM_X, EDIM_Y }; + long[] start = { 0, 0 }; + long[] count = new long[2]; + int[][] dset_data; + int[][] extend_dset_data = new int[EDIM_X][EDIM_Y]; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. 
+ try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. This is a + // two dimensional dataset so the dynamic allocation must be done + // in steps. + try { + if (dataset_id >= 0) + dataspace_id = H5.H5Dget_space(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate array of pointers to rows. + dset_data = new int[(int) dims[0]][(int) dims[1]]; + + // Read the data using the default properties. + try { + if (dataset_id >= 0) + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + System.out.println("Dataset before extension:"); + for (int indx = 0; indx < DIM_X; indx++) { + System.out.print(" [ "); + for (int jndx = 0; jndx < DIM_Y; jndx++) + System.out.print(dset_data[indx][jndx] + " "); + System.out.println("]"); + } + System.out.println(); + + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Extend the dataset. + try { + if (dataset_id >= 0) + H5.H5Dset_extent(dataset_id, extdims); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve the dataspace for the newly extended dataset. + try { + if (dataset_id >= 0) + dataspace_id = H5.H5Dget_space(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Initialize data for writing to the extended dataset. + for (int indx = 0; indx < EDIM_X; indx++) + for (int jndx = 0; jndx < EDIM_Y; jndx++) + extend_dset_data[indx][jndx] = jndx; + + // Select the entire dataspace. + try { + if (dataspace_id >= 0) { + H5.H5Sselect_all(dataspace_id); + + // Subtract a hyperslab reflecting the original dimensions from the + // selection. The selection now contains only the newly extended + // portions of the dataset. + count[0] = dims[0]; + count[1] = dims[1]; + H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_NOTB, start, null, count, null); + + // Write the data to the selected portion of the dataset. + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id, + HDF5Constants.H5P_DEFAULT, extend_dset_data); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + private static void readUnlimited() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM_X, DIM_Y }; + int[][] dset_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. 
+ try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for the read buffer as before. + try { + if (dataset_id >= 0) + dataspace_id = H5.H5Dget_space(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + // Allocate array of pointers to rows. + dset_data = new int[(int) dims[0]][(int) dims[1]]; + + // Read the data using the default properties. + try { + if (dataset_id >= 0) + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + System.out.println("Dataset after extension:"); + for (int indx = 0; indx < dims[0]; indx++) { + System.out.print(" [ "); + for (int jndx = 0; jndx < dims[1]; jndx++) + System.out.print(dset_data[indx][jndx] + " "); + System.out.println("]"); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + H5Ex_D_UnlimitedAdd.writeUnlimited(); + H5Ex_D_UnlimitedAdd.extendUnlimited(); + H5Ex_D_UnlimitedAdd.readUnlimited(); + } + +} diff --git a/java/examples/datasets/H5Ex_D_UnlimitedGzip.java b/java/examples/datasets/H5Ex_D_UnlimitedGzip.java new file mode 100644 index 0000000..c08ceef --- /dev/null +++ b/java/examples/datasets/H5Ex_D_UnlimitedGzip.java @@ -0,0 +1,504 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to create and extend an unlimited + dataset with gzip compression. The program first writes + integers to a gzip compressed dataset with dataspace + dimensions of DIM_XxDIM_Y, then closes the file. Next, it + reopens the file, reads back the data, outputs it to the + screen, extends the dataset, and writes new data to the + extended portions of the dataset. Finally it reopens the + file again, reads back the data, and outputs it to the + screen. 
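+
+ This combines the two preceding techniques: the dataset
+ creation property list carries both the deflate filter and
+ the chunked layout that unlimited dimensions require. A
+ minimal sketch of the calls used below (error handling
+ omitted):
+
+ long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ H5.H5Pset_deflate(dcpl, 9);
+ H5.H5Pset_chunk(dcpl, NDIMS, chunk_dims);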
+ ************************************************************/ +package examples.datasets; + +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_D_UnlimitedGzip { + private static String FILENAME = "H5Ex_D_UnlimitedGzip.h5"; + private static String DATASETNAME = "DS1"; + private static final int DIM_X = 4; + private static final int DIM_Y = 7; + private static final int EDIM_X = 6; + private static final int EDIM_Y = 10; + private static final int CHUNK_X = 4; + private static final int CHUNK_Y = 4; + private static final int RANK = 2; + private static final int NDIMS = 2; + + // Values for the status of space allocation + enum H5Z_filter { + H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE( + HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32( + HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT( + HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED( + HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX); + private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>(); + + static { + for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class)) + lookup.put(s.getCode(), s); + } + + private int code; + + H5Z_filter(int layout_type) { + this.code = layout_type; + } + + public int getCode() { + return this.code; + } + + public static H5Z_filter get(int code) { + return lookup.get(code); + } + } + + private static boolean checkGzipFilter() { + try { + int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE); + if (available == 0) { + System.out.println("gzip filter not available."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE); + if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) + || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) { + System.out.println("gzip filter not available for encoding and decoding."); + return false; + } + } + catch (Exception e) { + e.printStackTrace(); + } + return true; + } + + private static void writeUnlimited() { + long file_id = -1; + long dcpl_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM_X, DIM_Y }; + long[] chunk_dims = { CHUNK_X, CHUNK_Y }; + long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED }; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Initialize the dataset. + for (int indx = 0; indx < DIM_X; indx++) + for (int jndx = 0; jndx < DIM_Y; jndx++) + dset_data[indx][jndx] = indx * jndx - jndx; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace with unlimited dimensions. + try { + dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset creation property list, add the gzip compression + // filter. + try { + dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE); + if (dcpl_id >= 0) { + H5.H5Pset_deflate(dcpl_id, 9); + // Set the chunk size. 
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the unlimited dataset. + try { + if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id, + HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the data to the dataset. + try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dcpl_id >= 0) + H5.H5Pclose(dcpl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + private static void extendUnlimited() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM_X, DIM_Y }; + long[] extdims = { EDIM_X, EDIM_Y }; + long[] start = { 0, 0 }; + long[] count = new long[2]; + int[][] dset_data; + int[][] extend_dset_data = new int[EDIM_X][EDIM_Y]; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. This is a + // two dimensional dataset so the dynamic allocation must be done + // in steps. + try { + if (dataset_id >= 0) + dataspace_id = H5.H5Dget_space(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate array of pointers to rows. + dset_data = new int[(int) dims[0]][(int) dims[1]]; + + // Read the data using the default properties. + try { + if (dataset_id >= 0) + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + System.out.println("Dataset before extension:"); + for (int indx = 0; indx < DIM_X; indx++) { + System.out.print(" [ "); + for (int jndx = 0; jndx < DIM_Y; jndx++) + System.out.print(dset_data[indx][jndx] + " "); + System.out.println("]"); + } + System.out.println(); + + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Extend the dataset. + try { + if (dataset_id >= 0) + H5.H5Dset_extent(dataset_id, extdims); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve the dataspace for the newly extended dataset. 
+ try { + if (dataset_id >= 0) + dataspace_id = H5.H5Dget_space(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Initialize data for writing to the extended dataset. + for (int indx = 0; indx < EDIM_X; indx++) + for (int jndx = 0; jndx < EDIM_Y; jndx++) + extend_dset_data[indx][jndx] = jndx; + + // Select the entire dataspace. + try { + if (dataspace_id >= 0) { + H5.H5Sselect_all(dataspace_id); + + // Subtract a hyperslab reflecting the original dimensions from the + // selection. The selection now contains only the newly extended + // portions of the dataset. + count[0] = dims[0]; + count[1] = dims[1]; + H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_NOTB, start, null, count, null); + + // Write the data to the selected portion of the dataset. + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id, + HDF5Constants.H5P_DEFAULT, extend_dset_data); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + private static void readUnlimited() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long dcpl_id = -1; + long[] dims = { DIM_X, DIM_Y }; + int[][] dset_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve the dataset creation property list. + try { + if (dataset_id >= 0) + dcpl_id = H5.H5Dget_create_plist(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Retrieve and print the filter type. Here we only retrieve the + // first filter because we know that we only added one filter. + try { + if (dcpl_id >= 0) { + // Java lib requires a valid filter_name object and cd_values + int[] flags = { 0 }; + long[] cd_nelmts = { 1 }; + int[] cd_values = { 0 }; + String[] filter_name = { "" }; + int[] filter_config = { 0 }; + int filter_type = -1; + filter_type = H5 + .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config); + System.out.print("Filter type is: "); + switch (H5Z_filter.get(filter_type)) { + case H5Z_FILTER_DEFLATE: + System.out.println("H5Z_FILTER_DEFLATE"); + break; + case H5Z_FILTER_SHUFFLE: + System.out.println("H5Z_FILTER_SHUFFLE"); + break; + case H5Z_FILTER_FLETCHER32: + System.out.println("H5Z_FILTER_FLETCHER32"); + break; + case H5Z_FILTER_SZIP: + System.out.println("H5Z_FILTER_SZIP"); + break; + default: + System.out.println("H5Z_FILTER_ERROR"); + } + System.out.println(); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for the read buffer as before. 
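+ // (H5Sget_simple_extent_dims below overwrites dims with the
+ // extended sizes, so the buffer is allocated to match the
+ // dataset as it now exists in the file.)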
+ try { + if (dataset_id >= 0) + dataspace_id = H5.H5Dget_space(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + // Allocate array of pointers to rows. + dset_data = new int[(int) dims[0]][(int) dims[1]]; + + // Read the data using the default properties. + try { + if (dataset_id >= 0) + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + System.out.println("Dataset after extension:"); + for (int indx = 0; indx < dims[0]; indx++) { + System.out.print(" [ "); + for (int jndx = 0; jndx < dims[1]; jndx++) + System.out.print(dset_data[indx][jndx] + " "); + System.out.println("]"); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + // Check if gzip compression is available and can be used for both + // compression and decompression. Normally we do not perform error + // checking in these examples for the sake of clarity, but in this + // case we will make an exception because this filter is an + // optional part of the hdf5 library. + if (H5Ex_D_UnlimitedGzip.checkGzipFilter()) { + H5Ex_D_UnlimitedGzip.writeUnlimited(); + H5Ex_D_UnlimitedGzip.extendUnlimited(); + H5Ex_D_UnlimitedGzip.readUnlimited(); + } + } + +} diff --git a/java/examples/datasets/H5Ex_D_UnlimitedMod.java b/java/examples/datasets/H5Ex_D_UnlimitedMod.java new file mode 100644 index 0000000..884cad3 --- /dev/null +++ b/java/examples/datasets/H5Ex_D_UnlimitedMod.java @@ -0,0 +1,379 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to create and extend an unlimited + dataset. The program first writes integers to a dataset + with dataspace dimensions of DIM_XxDIM_Y, then closes the + file. Next, it reopens the file, reads back the data, + outputs it to the screen, extends the dataset, and writes + new data to the entire extended dataset. 
Finally, it
+ reopens the file again, reads back the data, and outputs it
+ to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_UnlimitedMod {
+ private static String FILENAME = "H5Ex_D_UnlimitedMod.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ private static void writeUnlimited() {
+ long file_id = -1;
+ long dcpl_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace with unlimited dimensions.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the chunk size.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the unlimited dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void extendUnlimited() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] extdims = { EDIM_X, EDIM_Y };
+ int[][] dset_data;
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer. This is a
+ // two-dimensional dataset so the dynamic allocation must be done
+ // in steps.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate array of pointers to rows.
+ dset_data = new int[(int) dims[0]][(int) dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset before extension:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Extend the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dset_extent(dataset_id, extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataspace for the newly extended dataset.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Initialize data for writing to the extended dataset.
+ for (int indx = 0; indx < EDIM_X; indx++)
+ for (int jndx = 0; jndx < EDIM_Y; jndx++)
+ extend_dset_data[indx][jndx] = jndx;
+
+ // Write the data to the extended dataset.
+ try {
+ if ((dataspace_id >= 0) && (dataset_id >= 0))
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readUnlimited() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for the read buffer as before.
+ try { + if (dataset_id >= 0) + dataspace_id = H5.H5Dget_space(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + // Allocate array of pointers to rows. + dset_data = new int[(int) dims[0]][(int) dims[1]]; + + // Read the data using the default properties. + try { + if (dataset_id >= 0) + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + System.out.println("Dataset after extension:"); + for (int indx = 0; indx < dims[0]; indx++) { + System.out.print(" [ "); + for (int jndx = 0; jndx < dims[1]; jndx++) + System.out.print(dset_data[indx][jndx] + " "); + System.out.println("]"); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + H5Ex_D_UnlimitedMod.writeUnlimited(); + H5Ex_D_UnlimitedMod.extendUnlimited(); + H5Ex_D_UnlimitedMod.readUnlimited(); + } + +} diff --git a/java/examples/datasets/Makefile.am b/java/examples/datasets/Makefile.am new file mode 100644 index 0000000..49888a0 --- /dev/null +++ b/java/examples/datasets/Makefile.am @@ -0,0 +1,78 @@ +# +# Copyright by The HDF Group. +# Copyright by the Board of Trustees of the University of Illinois. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the files COPYING and Copyright.html. COPYING can be found at the root +# of the source code distribution tree; Copyright.html can be found at the +# root level of an installed copy of the electronic HDF5 document set and +# is linked from the top-level documents page. It can also be found at +# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have +# access to either file, you may request a copy from help@hdfgroup.org. +## +## Makefile.am +## Run automake to generate a Makefile.in from this file. 
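+## (When the Java interface is enabled, `make` in this directory
+## builds the example jar and `make check` runs runExample.sh
+## against it.)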
+##
+#
+# HDF5 Java Library Examples Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+JAVAROOT = .classes
+
+classes:
+ $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+pkgpath = examples/datasets
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.5.jar:$$CLASSPATH
+
+jarfile = jar$(PACKAGE_TARNAME)datasets.jar
+
+AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
+
+TESTPACKAGE =
+
+noinst_JAVA = \
+ H5Ex_D_Alloc.java \
+ H5Ex_D_Checksum.java \
+ H5Ex_D_Chunk.java \
+ H5Ex_D_Compact.java \
+ H5Ex_D_External.java \
+ H5Ex_D_FillValue.java \
+ H5Ex_D_Gzip.java \
+ H5Ex_D_Hyperslab.java \
+ H5Ex_D_ReadWrite.java \
+ H5Ex_D_Shuffle.java \
+ H5Ex_D_Szip.java \
+ H5Ex_D_UnlimitedAdd.java \
+ H5Ex_D_UnlimitedGzip.java \
+ H5Ex_D_UnlimitedMod.java \
+ H5Ex_D_Nbit.java \
+ H5Ex_D_Transform.java \
+ H5Ex_D_Sofloat.java \
+ H5Ex_D_Soint.java
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+noinst_DATA = $(jarfile)
+
+.PHONY: classes
+
+check_SCRIPTS = runExample.sh
+TEST_SCRIPT = $(check_SCRIPTS)
+
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class runExample.sh
+
+clean:
+ rm -rf $(JAVAROOT)
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
+
+include $(top_srcdir)/config/conclude.am
diff --git a/java/examples/datasets/runExample.sh.in b/java/examples/datasets/runExample.sh.in
new file mode 100644
index 0000000..8ac28a7
--- /dev/null
+++ b/java/examples/datasets/runExample.sh.in
@@ -0,0 +1,413 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+
+top_builddir=@top_builddir@
+top_srcdir=@top_srcdir@
+srcdir=@srcdir@
+
+USE_FILTER_SZIP="@USE_FILTER_SZIP@"
+USE_FILTER_DEFLATE="@USE_FILTER_DEFLATE@"
+
+TESTNAME=EX_Datasets
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+
+# Set up default variable values if not supplied by the user.
+RM='rm -rf'
+CMP='cmp'
+DIFF='diff -c'
+CP='cp'
+DIRNAME='dirname'
+LS='ls'
+AWK='awk'
+
+nerrors=0
+
+# where the libs exist
+HDFLIB_HOME="$top_srcdir/java/lib"
+BLDLIBDIR="./lib"
+BLDDIR="."
+HDFTEST_HOME="$top_srcdir/java/examples/datasets"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@datasets.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# All the library files are copied from the source directory to the
+# test directory.
+# NOTE: Keep this framework to add/remove test files.
+# The list is also used to check that the required files exist.
+# A leading '#' (with no space after it) comments an entry out.
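+# For example, a hypothetical extra entry could be disabled like this:
+#$HDFLIB_HOME/unused-extra.jar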
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$HDFLIB_HOME/slf4j-api-1.7.5.jar
+$HDFLIB_HOME/ext/slf4j-simple-1.7.5.jar
+$top_builddir/src/.libs/libhdf5.*
+$top_builddir/java/src/jni/.libs/libhdf5_java.*
+$top_builddir/java/src/$JARFILE
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Alloc.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Checksum.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Chunk.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Compact.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_External.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_FillValue.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Gzip.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Hyperslab.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_ReadWrite.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Shuffle.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Szip.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Nbit.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Transform.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Sofloat.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Soint.txt
+"
+
+#
+# copy files from source dirs to test dir
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+ # Copy the test files; -f makes sure we always get a fresh copy.
+ for tstfile in $COPY_LIBFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip cp if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source dir and would make cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment this out to CREATE the expected file.
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+ # Skip rm if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source dir, where nothing was copied.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDLIBDIR
+ fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+ # Copy the test files; -f makes sure we always get a fresh copy.
+ for tstfile in $COPY_DATAFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip cp if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source dir and would make cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment this out to CREATE the expected file.
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+ # Skip rm if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source dir, where nothing was copied.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDDIR/*.txt
+ $RM $BLDDIR/*.out
+ fi
+}
+
+# Print a one-line message left justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES="                                                               "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+JAVAEXEFLAGS=@H5_JAVAFLAGS@
+
+###############################################################################
+# DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-1.7.5.jar:"$BLDLIBDIR"/slf4j-simple-1.7.5.jar:"$TESTJARFILE""
+
+TEST=/usr/bin/test
+if [ ! -x /usr/bin/test ]
+then
+TEST=`which test`
+fi
+
+if $TEST -z "$CLASSPATH"; then
+ CLASSPATH=""
+fi
+CLASSPATH=$CPATH":"$CLASSPATH
+export CLASSPATH
+
+if $TEST -n "$JAVAPATH" ; then
+ PATH=$JAVAPATH":"$PATH
+ export PATH
+fi
+
+if $TEST -e /bin/uname; then
+ os_name=`/bin/uname -s`
+elif $TEST -e /usr/bin/uname; then
+ os_name=`/usr/bin/uname -s`
+else
+ os_name=unknown
+fi
+
+if $TEST -z "$LD_LIBRARY_PATH" ; then
+ LD_LIBRARY_PATH=""
+fi
+
+case $os_name in
+ Darwin)
+ DYLD_LIBRARY_PATH=$BLDLIBDIR:$DYLD_LIBRARY_PATH
+ export DYLD_LIBRARY_PATH
+ LD_LIBRARY_PATH=$DYLD_LIBRARY_PATH
+ ;;
+ *)
+ LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
+ ;;
+esac
+
+export LD_LIBRARY_PATH
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Alloc"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Alloc > H5Ex_D_Alloc.out)
+if diff H5Ex_D_Alloc.out examples.datasets.H5Ex_D_Alloc.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Alloc"
+else
+ echo "**FAILED** datasets.H5Ex_D_Alloc"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Checksum"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Checksum > H5Ex_D_Checksum.out)
+if diff H5Ex_D_Checksum.out examples.datasets.H5Ex_D_Checksum.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Checksum"
+else
+ echo "**FAILED** datasets.H5Ex_D_Checksum"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Chunk"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Chunk > H5Ex_D_Chunk.out)
+if diff H5Ex_D_Chunk.out examples.datasets.H5Ex_D_Chunk.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Chunk"
+else
+ echo "**FAILED** datasets.H5Ex_D_Chunk"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS
-Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Compact" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Compact > H5Ex_D_Compact.out) +if diff H5Ex_D_Compact.out examples.datasets.H5Ex_D_Compact.txt > /dev/null; then + echo " PASSED datasets.H5Ex_D_Compact" +else + echo "**FAILED** datasets.H5Ex_D_Compact" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_External" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_External > H5Ex_D_External.out) +if diff H5Ex_D_External.out examples.datasets.H5Ex_D_External.txt > /dev/null; then + echo " PASSED datasets.H5Ex_D_External" +else + echo "**FAILED** datasets.H5Ex_D_External" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_FillValue" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_FillValue > H5Ex_D_FillValue.out) +if diff H5Ex_D_FillValue.out examples.datasets.H5Ex_D_FillValue.txt > /dev/null; then + echo " PASSED datasets.H5Ex_D_FillValue" +else + echo "**FAILED** datasets.H5Ex_D_FillValue" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Gzip" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Gzip > H5Ex_D_Gzip.out) +if diff H5Ex_D_Gzip.out examples.datasets.H5Ex_D_Gzip.txt > /dev/null; then + echo " PASSED datasets.H5Ex_D_Gzip" +else + echo "**FAILED** datasets.H5Ex_D_Gzip" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Hyperslab" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Hyperslab > H5Ex_D_Hyperslab.out) +if diff H5Ex_D_Hyperslab.out examples.datasets.H5Ex_D_Hyperslab.txt > /dev/null; then + echo " PASSED datasets.H5Ex_D_Hyperslab" +else + echo "**FAILED** datasets.H5Ex_D_Hyperslab" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_ReadWrite" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_ReadWrite > H5Ex_D_ReadWrite.out) +if diff H5Ex_D_ReadWrite.out examples.datasets.H5Ex_D_ReadWrite.txt > /dev/null; then + echo " PASSED datasets.H5Ex_D_ReadWrite" +else + echo "**FAILED** datasets.H5Ex_D_ReadWrite" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Shuffle" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace 
-Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Shuffle > H5Ex_D_Shuffle.out)
+if diff H5Ex_D_Shuffle.out examples.datasets.H5Ex_D_Shuffle.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Shuffle"
+else
+ echo "**FAILED** datasets.H5Ex_D_Shuffle"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+if test $USE_FILTER_SZIP = "yes"; then
+ echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Szip"
+ ($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Szip > H5Ex_D_Szip.out)
+ if diff H5Ex_D_Szip.out examples.datasets.H5Ex_D_Szip.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Szip"
+ else
+ echo "**FAILED** datasets.H5Ex_D_Szip"
+ nerrors="`expr $nerrors + 1`"
+ fi
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedAdd"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedAdd > H5Ex_D_UnlimitedAdd.out)
+if diff H5Ex_D_UnlimitedAdd.out examples.datasets.H5Ex_D_UnlimitedAdd.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_UnlimitedAdd"
+else
+ echo "**FAILED** datasets.H5Ex_D_UnlimitedAdd"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedGzip"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedGzip > H5Ex_D_UnlimitedGzip.out)
+if diff H5Ex_D_UnlimitedGzip.out examples.datasets.H5Ex_D_UnlimitedGzip.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_UnlimitedGzip"
+else
+ echo "**FAILED** datasets.H5Ex_D_UnlimitedGzip"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedMod"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedMod > H5Ex_D_UnlimitedMod.out)
+if diff H5Ex_D_UnlimitedMod.out examples.datasets.H5Ex_D_UnlimitedMod.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_UnlimitedMod"
+else
+ echo "**FAILED** datasets.H5Ex_D_UnlimitedMod"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Nbit"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Nbit > H5Ex_D_Nbit.out)
+if diff H5Ex_D_Nbit.out examples.datasets.H5Ex_D_Nbit.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Nbit"
+else
+ echo "**FAILED** datasets.H5Ex_D_Nbit"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Transform"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Transform > H5Ex_D_Transform.out)
+if diff
H5Ex_D_Transform.out examples.datasets.H5Ex_D_Transform.txt > /dev/null; then + echo " PASSED datasets.H5Ex_D_Transform" +else + echo "**FAILED** datasets.H5Ex_D_Transform" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Sofloat" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Sofloat > H5Ex_D_Sofloat.out) +if diff H5Ex_D_Sofloat.out examples.datasets.H5Ex_D_Sofloat.txt > /dev/null; then + echo " PASSED datasets.H5Ex_D_Sofloat" +else + echo "**FAILED** datasets.H5Ex_D_Sofloat" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Soint" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Soint > H5Ex_D_Soint.out) +if diff H5Ex_D_Soint.out examples.datasets.H5Ex_D_Soint.txt > /dev/null; then + echo " PASSED datasets.H5Ex_D_Soint" +else + echo "**FAILED** datasets.H5Ex_D_Soint" + nerrors="`expr $nerrors + 1`" +fi + +# Clean up temporary files/directories +CLEAN_LIBFILES_AND_BLDLIBDIR +CLEAN_DATAFILES_AND_BLDDIR + +# Report test results and exit +if test $nerrors -eq 0 ; then + echo "All $TESTNAME tests passed." + exit $EXIT_SUCCESS +else + echo "$TESTNAME tests failed with $nerrors errors." + exit $EXIT_FAILURE +fi diff --git a/java/examples/datatypes/CMakeLists.txt b/java/examples/datatypes/CMakeLists.txt new file mode 100644 index 0000000..6525506 --- /dev/null +++ b/java/examples/datatypes/CMakeLists.txt @@ -0,0 +1,109 @@ +cmake_minimum_required (VERSION 3.1.0) +PROJECT (HDFJAVA_EXAMPLES_DATATYPES Java) + +set (CMAKE_VERBOSE_MAKEFILE 1) + +INCLUDE_DIRECTORIES ( + ${HDF5_JAVA_JNI_BINARY_DIR} + ${HDF5_JAVA_HDF5_LIB_DIR} +) + +set (HDF_JAVA_EXAMPLES + H5Ex_T_Array + H5Ex_T_ArrayAttribute + H5Ex_T_Bit + H5Ex_T_BitAttribute + H5Ex_T_Commit + H5Ex_T_Compound + H5Ex_T_CompoundAttribute + H5Ex_T_Float + H5Ex_T_FloatAttribute + H5Ex_T_Integer + H5Ex_T_IntegerAttribute + H5Ex_T_ObjectReference + H5Ex_T_ObjectReferenceAttribute + H5Ex_T_Opaque + H5Ex_T_OpaqueAttribute + H5Ex_T_String + H5Ex_T_StringAttribute + H5Ex_T_VLString +) + +if (WIN32) + set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";") +else (WIN32) + set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":") +endif (WIN32) + +set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS}") + +set (CMAKE_JAVA_CLASSPATH ".") +foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH}) + set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}") +endforeach (CMAKE_INCLUDE_PATH) + +foreach (example ${HDF_JAVA_EXAMPLES}) + file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt + "Main-Class: examples.datatypes.${example} +" + ) + add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java) + get_target_property (${example}_JAR_FILE ${example} JAR_FILE) +# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples) + get_target_property (${example}_CLASSPATH ${example} CLASSDIR) + add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET}) +endforeach (example ${HDF_JAVA_EXAMPLES}) + +set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}") + +set (CMAKE_JAVA_CLASSPATH ".") +foreach (HDFJAVA_JAR 
${CMAKE_JAVA_INCLUDE_PATH}) + set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${HDFJAVA_JAR}") +endforeach (HDFJAVA_JAR) + +MACRO (ADD_H5_TEST resultfile resultcode) + add_test ( + NAME JAVA_datatypes-${resultfile} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}" + -D "TEST_PROGRAM=examples.datatypes.${resultfile}" + -D "TEST_ARGS:STRING=${ARGN}" + -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${resultfile}_JAR_FILE}" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" + -D "TEST_FOLDER=${HDFJAVA_EXAMPLES_BINARY_DIR}" + -D "TEST_OUTPUT=datatypes/${resultfile}.out" + -D "TEST_EXPECT=${resultcode}" + -D "TEST_REFERENCE=datatypes/${resultfile}.txt" + -P "${HDF_RESOURCES_DIR}/jrunTest.cmake" + ) + if (NOT "${last_test}" STREQUAL "") + set_tests_properties (JAVA_datatypes-${resultfile} PROPERTIES DEPENDS ${last_test}) + endif (NOT "${last_test}" STREQUAL "") + set (last_test "JAVA_datatypes-${resultfile}") +ENDMACRO (ADD_H5_TEST file) + +if (BUILD_TESTING) + foreach (example ${HDF_JAVA_EXAMPLES}) + add_test ( + NAME JAVA_datatypes-${example}-clearall-objects + COMMAND ${CMAKE_COMMAND} + -E remove + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 + ${example}.out + ${example}.out.err + ) + if (NOT "${last_test}" STREQUAL "") + set_tests_properties (JAVA_datatypes-${example}-clearall-objects PROPERTIES DEPENDS ${last_test}) + endif (NOT "${last_test}" STREQUAL "") + add_test ( + NAME JAVA_datatypes-${example}-copy-objects + COMMAND ${CMAKE_COMMAND} + -E copy_if_different + ${HDFJAVA_EXAMPLES_SOURCE_DIR}/testfiles/examples.datatypes.${example}.txt + ${HDFJAVA_EXAMPLES_DATATYPES_BINARY_DIR}/${example}.txt + ) + set_tests_properties (JAVA_datatypes-${example}-copy-objects PROPERTIES DEPENDS JAVA_datatypes-${example}-clearall-objects) + set (last_test "JAVA_datatypes-${example}-copy-objects") + ADD_H5_TEST (${example} 0) + endforeach (example ${HDF_JAVA_EXAMPLES}) +endif (BUILD_TESTING) diff --git a/java/examples/datatypes/H5Ex_T_Array.java b/java/examples/datatypes/H5Ex_T_Array.java new file mode 100644 index 0000000..7b7009a --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_Array.java @@ -0,0 +1,282 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write array datatypes + to a dataset. The program first writes integers arrays of + dimension ADIM0xADIM1 to a dataset with a dataspace of + DIM0, then closes the file. Next, it reopens the file, + reads back the data, and outputs it to the screen. 
+ ************************************************************/ + +package examples.datatypes; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_T_Array { + private static String FILENAME = "H5Ex_T_Array.h5"; + private static String DATASETNAME = "DS1"; + private static final int DIM0 = 4; + private static final int ADIM0 = 3; + private static final int ADIM1 = 5; + private static final int RANK = 1; + private static final int NDIMS = 2; + + private static void CreateDataset() { + long file_id = -1; + long filetype_id = -1; + long memtype_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM0 }; + long[] adims = { ADIM0, ADIM1 }; + int[][][] dset_data = new int[DIM0][ADIM0][ADIM1]; + + // Initialize data. indx is the element in the dataspace, jndx and kndx the + // elements within the array datatype. + for (int indx = 0; indx < DIM0; indx++) + for (int jndx = 0; jndx < ADIM0; jndx++) + for (int kndx = 0; kndx < ADIM1; kndx++) + dset_data[indx][jndx][kndx] = indx * jndx - jndx * kndx + indx * kndx; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create array datatypes for file. + try { + filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, NDIMS, adims); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create array datatypes for memory. + try { + memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, NDIMS, adims); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + dataspace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset. + try { + if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the dataset. + try { + if ((dataset_id >= 0) && (memtype_id >= 0)) + H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the file type. + try { + if (filetype_id >= 0) + H5.H5Tclose(filetype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the mem type. + try { + if (memtype_id >= 0) + H5.H5Tclose(memtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long filetype_id = -1; + long memtype_id = -1; + long dataset_id = -1; + long[] dims = { DIM0 }; + long[] adims = { ADIM0, ADIM1 }; + int[][][] dset_data; + + // Open an existing file. 
+ try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get the datatype. + try { + if (dataset_id >= 0) + filetype_id = H5.H5Dget_type(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get the datatype's dimensions. + try { + if (filetype_id >= 0) + H5.H5Tget_array_dims(filetype_id, adims); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate array of pointers to two-dimensional arrays (the + // elements of the dataset. + dset_data = new int[(int) dims[0]][(int) (adims[0])][(int) (adims[1])]; + + // Create array datatypes for memory. + try { + memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, 2, adims); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Read data. + try { + if ((dataset_id >= 0) && (memtype_id >= 0)) + H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + for (int indx = 0; indx < dims[0]; indx++) { + System.out.println(DATASETNAME + " [" + indx + "]:"); + for (int jndx = 0; jndx < adims[0]; jndx++) { + System.out.print(" ["); + for (int kndx = 0; kndx < adims[1]; kndx++) + System.out.print(dset_data[indx][jndx][kndx] + " "); + System.out.println("]"); + } + System.out.println(); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the file type. + try { + if (filetype_id >= 0) + H5.H5Tclose(filetype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the mem type. + try { + if (memtype_id >= 0) + H5.H5Tclose(memtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_Array.CreateDataset(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). + H5Ex_T_Array.ReadDataset(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_ArrayAttribute.java b/java/examples/datatypes/H5Ex_T_ArrayAttribute.java new file mode 100644 index 0000000..ce97457 --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_ArrayAttribute.java @@ -0,0 +1,322 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. 
It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write array datatypes + to an attribute. The program first writes integers arrays + of dimension ADIM0xADIM1 to an attribute with a dataspace + of DIM0, then closes the file. Next, it reopens the + file, reads back the data, and outputs it to the screen. + ************************************************************/ + +package examples.datatypes; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_T_ArrayAttribute { + private static String FILENAME = "H5Ex_T_ArrayAttribute.h5"; + private static String DATASETNAME = "DS1"; + private static String ATTRIBUTENAME = "A1"; + private static final int DIM0 = 4; + private static final int ADIM0 = 3; + private static final int ADIM1 = 5; + private static final int RANK = 1; + private static final int NDIMS = 2; + + private static void CreateDataset() { + long file_id = -1; + long filetype_id = -1; + long memtype_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long[] dims = { DIM0 }; + long[] adims = { ADIM0, ADIM1 }; + int[][][] dset_data = new int[DIM0][ADIM0][ADIM1]; + + // Initialize data. indx is the element in the dataspace, jndx and kndx the + // elements within the array datatype. + for (int indx = 0; indx < DIM0; indx++) + for (int jndx = 0; jndx < ADIM0; jndx++) + for (int kndx = 0; kndx < ADIM1; kndx++) + dset_data[indx][jndx][kndx] = indx * jndx - jndx * kndx + indx * kndx; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create array datatypes for file. + try { + filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, NDIMS, adims); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create array datatypes for memory. + try { + memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, NDIMS, adims); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataset with a scalar dataspace. + try { + dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR); + if (dataspace_id >= 0) { + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + H5.H5Sclose(dataspace_id); + dataspace_id = -1; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + dataspace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the attribute and write the array data to it. + try { + if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0)) + attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the dataset. 
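+ // (Unlike H5Dwrite, H5Awrite takes no dataspace or transfer property
+ // list: the whole attribute is always written, and the library converts
+ // from the memory layout described by memtype_id to the file layout
+ // described by filetype_id.)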
+ try { + if ((attribute_id >= 0) && (memtype_id >= 0)) + H5.H5Awrite(attribute_id, memtype_id, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the file type. + try { + if (filetype_id >= 0) + H5.H5Tclose(filetype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the mem type. + try { + if (memtype_id >= 0) + H5.H5Tclose(memtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long filetype_id = -1; + long memtype_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long[] dims = { DIM0 }; + long[] adims = { ADIM0, ADIM1 }; + int[][][] dset_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get the datatype. + try { + if (attribute_id >= 0) + filetype_id = H5.H5Aget_type(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get the datatype's dimensions. + try { + if (filetype_id >= 0) + H5.H5Tget_array_dims(filetype_id, adims); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate array of pointers to two-dimensional arrays (the + // elements of the dataset. + dset_data = new int[(int) dims[0]][(int) (adims[0])][(int) (adims[1])]; + + // Create array datatypes for memory. + try { + memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, 2, adims); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Read data. + try { + if ((attribute_id >= 0) && (memtype_id >= 0)) + H5.H5Aread(attribute_id, memtype_id, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + for (int indx = 0; indx < dims[0]; indx++) { + System.out.println(ATTRIBUTENAME + " [" + indx + "]:"); + for (int jndx = 0; jndx < adims[0]; jndx++) { + System.out.print(" ["); + for (int kndx = 0; kndx < adims[1]; kndx++) + System.out.print(dset_data[indx][jndx][kndx] + " "); + System.out.println("]"); + } + System.out.println(); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the file type. 
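+ // (Each handle is closed in its own try/catch so that a failure to
+ // close one identifier does not prevent the remaining identifiers and
+ // the file itself from being released.)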
+ try { + if (filetype_id >= 0) + H5.H5Tclose(filetype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the mem type. + try { + if (memtype_id >= 0) + H5.H5Tclose(memtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_ArrayAttribute.CreateDataset(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). + H5Ex_T_ArrayAttribute.ReadDataset(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_Bit.java b/java/examples/datatypes/H5Ex_T_Bit.java new file mode 100644 index 0000000..f76c7d5 --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_Bit.java @@ -0,0 +1,227 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write bitfield + datatypes to a dataset. The program first writes bit + fields to a dataset with a dataspace of DIM0xDIM1, then + closes the file. Next, it reopens the file, reads back + the data, and outputs it to the screen. + ************************************************************/ + +package examples.datatypes; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_T_Bit { + private static String FILENAME = "H5Ex_T_Bit.h5"; + private static String DATASETNAME = "DS1"; + private static final int DIM0 = 4; + private static final int DIM1 = 7; + private static final int RANK = 2; + + private static void CreateDataset() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM0, DIM1 }; + int[][] dset_data = new int[DIM0][DIM1]; + + // Initialize data. + for (int indx = 0; indx < DIM0; indx++) + for (int jndx = 0; jndx < DIM1; jndx++) { + dset_data[indx][jndx] = 0; + dset_data[indx][jndx] |= (indx * jndx - jndx) & 0x03; /* Field "A" */ + dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */ + dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */ + dset_data[indx][jndx] |= ((indx + jndx) & 0x03) << 6; /* Field "D" */ + } + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. 
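+ // (A non-null third argument would set a larger maximum instead; for
+ // example, a maxdims array of HDF5Constants.H5S_UNLIMITED values,
+ // combined with a chunked layout, is what the H5Ex_D_UnlimitedAdd
+ // example uses to make a dataset extendable.)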
+ try { + dataspace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset. + try { + if ((file_id >= 0) && (dataspace_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_B8BE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the bitfield data to the dataset. + try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM0, DIM1 }; + int[][] dset_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. + try { + if (dataset_id >= 0) + dataspace_id = H5.H5Dget_space(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate array of pointers to two-dimensional arrays (the + // elements of the dataset. + dset_data = new int[(int) dims[0]][(int) (dims[1])]; + + // Read data. + try { + if (dataset_id >= 0) + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + System.out.println(DATASETNAME + ":"); + for (int indx = 0; indx < dims[0]; indx++) { + System.out.print(" ["); + for (int jndx = 0; jndx < dims[1]; jndx++) { + System.out.print("{" + (dset_data[indx][jndx] & 0x03) + ", "); + System.out.print(((dset_data[indx][jndx] >> 2) & 0x03) + ", "); + System.out.print(((dset_data[indx][jndx] >> 4) & 0x03) + ", "); + System.out.print(((dset_data[indx][jndx] >> 6) & 0x03) + "}"); + } + System.out.println("]"); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. 
+ try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_Bit.CreateDataset(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). + H5Ex_T_Bit.ReadDataset(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_BitAttribute.java b/java/examples/datatypes/H5Ex_T_BitAttribute.java new file mode 100644 index 0000000..a5ab81b --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_BitAttribute.java @@ -0,0 +1,267 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write bitfield + datatypes to an attribute. The program first writes bit + fields to an attribute with a dataspace of DIM0xDIM1, then + closes the file. Next, it reopens the file, reads back + the data, and outputs it to the screen. + ************************************************************/ + +package examples.datatypes; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_T_BitAttribute { + private static String FILENAME = "H5Ex_T_BitAttribute.h5"; + private static String DATASETNAME = "DS1"; + private static String ATTRIBUTENAME = "A1"; + private static final int DIM0 = 4; + private static final int DIM1 = 7; + private static final int RANK = 2; + + private static void CreateDataset() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long[] dims = { DIM0, DIM1 }; + int[][] dset_data = new int[DIM0][DIM1]; + + // Initialize data. + for (int indx = 0; indx < DIM0; indx++) + for (int jndx = 0; jndx < DIM1; jndx++) { + dset_data[indx][jndx] = 0; + dset_data[indx][jndx] |= (indx * jndx - jndx) & 0x03; /* Field "A" */ + dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */ + dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */ + dset_data[indx][jndx] |= ((indx + jndx) & 0x03) << 6; /* Field "D" */ + } + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataset with a scalar dataspace. 
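+ // (The scalar dataspace gives the dataset a single element; the dataset
+ // exists here only as an object to attach the attribute to. The
+ // attribute itself gets the DIM0 x DIM1 simple dataspace created below.)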
+ try { + dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR); + if (dataspace_id >= 0) { + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + H5.H5Sclose(dataspace_id); + dataspace_id = -1; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + dataspace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the attribute and write the array data to it. + try { + if ((dataset_id >= 0) && (dataspace_id >= 0)) + attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_B8BE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the dataset. + try { + if (attribute_id >= 0) + H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_B8, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long[] dims = { DIM0, DIM1 }; + int[][] dset_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. + try { + if (attribute_id >= 0) + dataspace_id = H5.H5Aget_space(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate array of pointers to two-dimensional arrays (the + // elements of the dataset. + dset_data = new int[(int) dims[0]][(int) (dims[1])]; + + // Read data. + try { + if (attribute_id >= 0) + H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_B8, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. 
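+ // (Each byte packs four two-bit fields, matching the shifts used when
+ // the data was written: "A" in bits 0-1, "B" in bits 2-3, "C" in bits
+ // 4-5 and "D" in bits 6-7.)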
+ System.out.println(ATTRIBUTENAME + ":"); + for (int indx = 0; indx < dims[0]; indx++) { + System.out.print(" ["); + for (int jndx = 0; jndx < dims[1]; jndx++) { + System.out.print("{" + (dset_data[indx][jndx] & 0x03) + ", "); + System.out.print(((dset_data[indx][jndx] >> 2) & 0x03) + ", "); + System.out.print(((dset_data[indx][jndx] >> 4) & 0x03) + ", "); + System.out.print(((dset_data[indx][jndx] >> 6) & 0x03) + "}"); + } + System.out.println("]"); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_BitAttribute.CreateDataset(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). + H5Ex_T_BitAttribute.ReadDataset(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_Commit.java b/java/examples/datatypes/H5Ex_T_Commit.java new file mode 100644 index 0000000..d4e43f9 --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_Commit.java @@ -0,0 +1,265 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to commit a named datatype to a + file, and read back that datatype. The program first + defines a compound datatype, commits it to a file, then + closes the file. Next, it reopens the file, opens the + datatype, and outputs the names of its fields to the + screen. 
+ ************************************************************/ + +package examples.datatypes; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +public class H5Ex_T_Commit { + private static String FILENAME = "H5Ex_T_Commit.h5"; + private static String DATATYPENAME = "Sensor_Type"; + protected static final int INTEGERSIZE = 4; + protected static final int DOUBLESIZE = 8; + protected final static int MAXSTRINGSIZE = 80; + + // Values for the various classes of datatypes + enum H5T_class { + H5T_NO_CLASS(HDF5Constants.H5T_NO_CLASS), // error + H5T_INTEGER(HDF5Constants.H5T_INTEGER), // integer types + H5T_FLOAT(HDF5Constants.H5T_FLOAT), // floating-point types + H5T_TIME(HDF5Constants.H5T_TIME), // date and time types + H5T_STRING(HDF5Constants.H5T_STRING), // character string types + H5T_BITFIELD(HDF5Constants.H5T_BITFIELD), // bit field types + H5T_OPAQUE(HDF5Constants.H5T_OPAQUE), // opaque types + H5T_COMPOUND(HDF5Constants.H5T_COMPOUND), // compound types + H5T_REFERENCE(HDF5Constants.H5T_REFERENCE), // reference types + H5T_ENUM(HDF5Constants.H5T_ENUM), // enumeration types + H5T_VLEN(HDF5Constants.H5T_VLEN), // Variable-Length types + H5T_ARRAY(HDF5Constants.H5T_ARRAY), // Array types + H5T_NCLASSES(11); // this must be last + + private static final Map<Long, H5T_class> lookup = new HashMap<Long, H5T_class>(); + + static { + for (H5T_class s : EnumSet.allOf(H5T_class.class)) + lookup.put(s.getCode(), s); + } + + private long code; + + H5T_class(long layout_type) { + this.code = layout_type; + } + + public long getCode() { + return this.code; + } + + public static H5T_class get(long typeclass_id) { + return lookup.get(typeclass_id); + } + } + + // The supporting Sensor_Datatype class. + private static class Sensor_Datatype { + static int numberMembers = 4; + static int[] memberDims = { 1, 1, 1, 1 }; + + String[] memberNames = { "Serial number", "Location", "Temperature (F)", "Pressure (inHg)" }; + long[] memberFileTypes = { HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1, HDF5Constants.H5T_IEEE_F64BE, + HDF5Constants.H5T_IEEE_F64BE }; + static int[] memberStorage = { INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE }; + + // Data size is the storage size for the members not the object. + static long getDataSize() { + long data_size = 0; + for (int indx = 0; indx < numberMembers; indx++) + data_size += memberStorage[indx] * memberDims[indx]; + return data_size; + } + + static int getOffset(int memberItem) { + int data_offset = 0; + for (int indx = 0; indx < memberItem; indx++) + data_offset += memberStorage[indx]; + return data_offset; + } + } + + private static void CreateDataType() { + long file_id = -1; + long strtype_id = -1; + long filetype_id = -1; + Sensor_Datatype datatypes = new Sensor_Datatype(); + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create string datatype. + try { + strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1); + if (strtype_id >= 0) + H5.H5Tset_size(strtype_id, MAXSTRINGSIZE); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the compound datatype for the file. Because the standard + // types we are using for the file may have different sizes than + // the corresponding native types, we must manually calculate the + // offset of each member. 
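+ // (Worked out for the sizes above: with INTEGERSIZE = 4, MAXSTRINGSIZE
+ // = 80 and DOUBLESIZE = 8, getOffset() yields byte offsets 0, 4, 84 and
+ // 92 for the four members, and getDataSize() returns a total of 100
+ // bytes for the compound type.)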
+ try { + filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize()); + if (filetype_id >= 0) { + for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) { + long type_id = datatypes.memberFileTypes[indx]; + if (type_id == HDF5Constants.H5T_C_S1) + type_id = strtype_id; + H5.H5Tinsert(filetype_id, datatypes.memberNames[indx], Sensor_Datatype.getOffset(indx), type_id); + } + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Commit the compound datatype to the file, creating a named datatype. + try { + if ((file_id >= 0) && (filetype_id >= 0)) + H5.H5Tcommit(file_id, DATATYPENAME, filetype_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the file type. + try { + if (filetype_id >= 0) + H5.H5Tclose(filetype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the str type. + try { + if (strtype_id >= 0) + H5.H5Tclose(strtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataType() { + long file_id = -1; + long typeclass_id = -1; + long filetype_id = -1; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open named datatype. + try { + if (file_id >= 0) + filetype_id = H5.H5Topen(file_id, DATATYPENAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + System.out.println("Named datatype: " + DATATYPENAME + ":"); + + // Get datatype class. If it isn't compound, we won't print anything. + try { + if (filetype_id >= 0) + typeclass_id = H5.H5Tget_class(filetype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + // Read data. + try { + if (H5T_class.get(typeclass_id) == H5T_class.H5T_COMPOUND) { + System.out.println(" Class: H5T_COMPOUND"); + int nmembs = H5.H5Tget_nmembers(filetype_id); + // Iterate over compound datatype members. + for (int indx = 0; indx < nmembs; indx++) { + String member_name = H5.H5Tget_member_name(filetype_id, indx); + System.out.println(" " + member_name); + } + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the mem type. + try { + if (filetype_id >= 0) + H5.H5Tclose(filetype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_Commit.CreateDataType(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). + H5Ex_T_Commit.ReadDataType(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_Compound.java b/java/examples/datatypes/H5Ex_T_Compound.java new file mode 100644 index 0000000..f270cb9 --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_Compound.java @@ -0,0 +1,443 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. 
* + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write compound + datatypes to a dataset. The program first writes + compound structures to a dataset with a dataspace of DIM0, + then closes the file. Next, it reopens the file, reads + back the data, and outputs it to the screen. + ************************************************************/ + +package examples.datatypes; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.charset.Charset; + +public class H5Ex_T_Compound { + private static String FILENAME = "H5Ex_T_Compound.h5"; + private static String DATASETNAME = "DS1"; + private static final int DIM0 = 4; + private static final int RANK = 1; + protected static final int INTEGERSIZE = 4; + protected static final int DOUBLESIZE = 8; + protected final static int MAXSTRINGSIZE = 80; + + static class Sensor_Datatype { + static int numberMembers = 4; + static int[] memberDims = { 1, 1, 1, 1 }; + + static String[] memberNames = { "Serial number", "Location", "Temperature (F)", "Pressure (inHg)" }; + static long[] memberMemTypes = { HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1, + HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE }; + static long[] memberFileTypes = { HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1, + HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE }; + static int[] memberStorage = { INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE }; + + // Data size is the storage size for the members. + static long getTotalDataSize() { + long data_size = 0; + for (int indx = 0; indx < numberMembers; indx++) + data_size += memberStorage[indx] * memberDims[indx]; + return DIM0 * data_size; + } + + static long getDataSize() { + long data_size = 0; + for (int indx = 0; indx < numberMembers; indx++) + data_size += memberStorage[indx] * memberDims[indx]; + return data_size; + } + + static int getOffset(int memberItem) { + int data_offset = 0; + for (int indx = 0; indx < memberItem; indx++) + data_offset += memberStorage[indx]; + return data_offset; + } + } + + static class Sensor { + public int serial_no; + public String location; + public double temperature; + public double pressure; + + Sensor(int serial_no, String location, double temperature, double pressure) { + this.serial_no = serial_no; + this.location = location; + this.temperature = temperature; + this.pressure = pressure; + } + + Sensor(ByteBuffer databuf, int dbposition) { + readBuffer(databuf, dbposition); + } + + void writeBuffer(ByteBuffer databuf, int dbposition) { + databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), serial_no); + byte[] temp_str = location.getBytes(Charset.forName("UTF-8")); + int arraylen = (temp_str.length > MAXSTRINGSIZE) ? 
MAXSTRINGSIZE : temp_str.length; + for (int ndx = 0; ndx < arraylen; ndx++) + databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, temp_str[ndx]); + for (int ndx = arraylen; ndx < MAXSTRINGSIZE; ndx++) + databuf.put(dbposition + Sensor_Datatype.getOffset(1) + arraylen, (byte) 0); + databuf.putDouble(dbposition + Sensor_Datatype.getOffset(2), temperature); + databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), pressure); + } + + void readBuffer(ByteBuffer databuf, int dbposition) { + this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0)); + ByteBuffer stringbuf = databuf.duplicate(); + stringbuf.position(dbposition + Sensor_Datatype.getOffset(1)); + stringbuf.limit(dbposition + Sensor_Datatype.getOffset(1) + MAXSTRINGSIZE); + byte[] bytearr = new byte[stringbuf.remaining()]; + stringbuf.get(bytearr); + this.location = new String(bytearr, Charset.forName("UTF-8")).trim(); + this.temperature = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(2)); + this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3)); + } + + @Override + public String toString() { + return String.format("Serial number : " + serial_no + "%n" + + "Location : " + location + "%n" + + "Temperature (F) : " + temperature + "%n" + + "Pressure (inHg) : " + pressure + "%n"); + } + } + + private static void CreateDataset() { + long file_id = -1; + long strtype_id = -1; + long memtype_id = -1; + long filetype_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM0 }; + Sensor[] object_data = new Sensor[DIM0]; + byte[] dset_data = null; + + // Initialize data. + object_data[0] = new Sensor(1153, new String("Exterior (static)"), 53.23, 24.57); + object_data[1] = new Sensor(1184, new String("Intake"), 55.12, 22.95); + object_data[2] = new Sensor(1027, new String("Intake manifold"), 103.55, 31.23); + object_data[3] = new Sensor(1313, new String("Exhaust manifold"), 1252.89, 84.11); + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create string datatype. + try { + strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1); + if (strtype_id >= 0) + H5.H5Tset_size(strtype_id, MAXSTRINGSIZE); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the compound datatype for memory. + try { + memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize()); + if (memtype_id >= 0) { + for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) { + long type_id = Sensor_Datatype.memberMemTypes[indx]; + if (type_id == HDF5Constants.H5T_C_S1) + type_id = strtype_id; + H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx), + type_id); + } + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the compound datatype for the file. Because the standard + // types we are using for the file may have different sizes than + // the corresponding native types, we must manually calculate the + // offset of each member. 
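+        // (The memory type built above uses native, platform-ordered types,
+        // H5T_NATIVE_INT and H5T_NATIVE_DOUBLE, while the file type built below
+        // uses fixed big-endian standard types, H5T_STD_I32BE and
+        // H5T_IEEE_F64BE. When H5Dwrite is later called with the memory type
+        // against a dataset created with the file type, the library converts
+        // each member between the two layouts automatically.)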
+ try { + filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize()); + if (filetype_id >= 0) { + for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) { + long type_id = Sensor_Datatype.memberFileTypes[indx]; + if (type_id == HDF5Constants.H5T_C_S1) + type_id = strtype_id; + H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx), + type_id); + } + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + dataspace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset. + try { + if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the compound data to the dataset. + // allocate memory for read buffer. + dset_data = new byte[(int)dims[0] * (int)Sensor_Datatype.getDataSize()]; + ByteBuffer outBuf = ByteBuffer.wrap(dset_data); + outBuf.order(ByteOrder.nativeOrder()); + for (int indx = 0; indx < (int) dims[0]; indx++) { + object_data[indx].writeBuffer(outBuf, indx * (int)Sensor_Datatype.getDataSize()); + } + try { + if ((dataset_id >= 0) && (memtype_id >= 0)) + H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the file type. + try { + if (filetype_id >= 0) + H5.H5Tclose(filetype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the mem type. + try { + if (memtype_id >= 0) + H5.H5Tclose(memtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (strtype_id >= 0) + H5.H5Tclose(strtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long strtype_id = -1; + long memtype_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM0 }; + Sensor[] object_data2; + byte[] dset_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. + try { + if (dataset_id >= 0) + dataspace_id = H5.H5Dget_space(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create string datatype. 
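+        // (The read side rebuilds the same fixed-size string and compound types
+        // by hand so that the member offsets match what readBuffer() expects.
+        // As an alternative sketch, the stored type could be queried from the
+        // file instead, e.g.
+        //     long ftype_id = H5.H5Dget_type(dataset_id);
+        //     long ntype_id = H5.H5Tget_native_type(ftype_id);
+        // at the cost of then discovering the member offsets at run time.)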
+ try { + strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1); + if (strtype_id >= 0) + H5.H5Tset_size(strtype_id, MAXSTRINGSIZE); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the compound datatype for memory. + try { + memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize()); + if (memtype_id >= 0) { + for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) { + long type_id = Sensor_Datatype.memberMemTypes[indx]; + if (type_id == HDF5Constants.H5T_C_S1) + type_id = strtype_id; + H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx), + type_id); + } + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // allocate memory for read buffer. + dset_data = new byte[(int) dims[0] * (int)Sensor_Datatype.getDataSize()]; + + object_data2 = new Sensor[(int) dims[0]]; + + // Read data. + try { + if ((dataset_id >= 0) && (memtype_id >= 0)) + H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + + ByteBuffer inBuf = ByteBuffer.wrap(dset_data); + inBuf.order(ByteOrder.nativeOrder()); + for (int indx = 0; indx < (int) dims[0]; indx++) { + object_data2[indx] = new Sensor(inBuf, indx * (int)Sensor_Datatype.getDataSize()); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + for (int indx = 0; indx < dims[0]; indx++) { + System.out.println(DATASETNAME + " [" + indx + "]:"); + System.out.println(object_data2[indx].toString()); + } + System.out.println(); + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the mem type. + try { + if (memtype_id >= 0) + H5.H5Tclose(memtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (strtype_id >= 0) + H5.H5Tclose(strtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_Compound.CreateDataset(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). + H5Ex_T_Compound.ReadDataset(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_CompoundAttribute.java b/java/examples/datatypes/H5Ex_T_CompoundAttribute.java new file mode 100644 index 0000000..25581d4 --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_CompoundAttribute.java @@ -0,0 +1,486 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. 
It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write compound + datatypes to an attribute. The program first writes + compound structures to an attribute with a dataspace of + DIM0, then closes the file. Next, it reopens the file, + reads back the data, and outputs it to the screen. + ************************************************************/ + +package examples.datatypes; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.charset.Charset; + +public class H5Ex_T_CompoundAttribute { + private static String FILENAME = "H5Ex_T_CompoundAttribute.h5"; + private static String DATASETNAME = "DS1"; + private static String ATTRIBUTENAME = "A1"; + private static final int DIM0 = 4; + private static final int RANK = 1; + protected static final int INTEGERSIZE = 4; + protected static final int DOUBLESIZE = 8; + protected final static int MAXSTRINGSIZE = 80; + + // Using Java Externalization will add a two-byte object header in + // the stream, which needs to be called out in the datatypes. + static class Sensor_Datatype { + static int numberMembers = 4; + static int[] memberDims = { 1, 1, 1, 1 }; + + static String[] memberNames = { "Serial number", "Location", "Temperature (F)", "Pressure (inHg)" }; + static long[] memberMemTypes = { HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1, + HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE }; + static long[] memberFileTypes = { HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1, + HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE }; + static int[] memberStorage = { INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE }; + + // Data size is the storage size for the members not the object. + static long getTotalDataSize() { + long data_size = 0; + for (int indx = 0; indx < numberMembers; indx++) + data_size += memberStorage[indx] * memberDims[indx]; + return DIM0 * data_size; + } + + static long getDataSize() { + long data_size = 0; + for (int indx = 0; indx < numberMembers; indx++) + data_size += memberStorage[indx] * memberDims[indx]; + return data_size; + } + + static int getOffset(int memberItem) { + int data_offset = 0; + for (int indx = 0; indx < memberItem; indx++) + data_offset += memberStorage[indx]; + return data_offset; + } + } + + static class Sensor { + public int serial_no; + public String location; + public double temperature; + public double pressure; + + Sensor(int serial_no, String location, double temperature, double pressure) { + this.serial_no = serial_no; + this.location = location; + this.temperature = temperature; + this.pressure = pressure; + } + + Sensor(ByteBuffer databuf, int dbposition) { + readBuffer(databuf, dbposition); + } + + void writeBuffer(ByteBuffer databuf, int dbposition) { + databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), serial_no); + byte[] temp_str = location.getBytes(Charset.forName("UTF-8")); + int arraylen = (temp_str.length > MAXSTRINGSIZE) ? 
MAXSTRINGSIZE : temp_str.length; + for (int ndx = 0; ndx < arraylen; ndx++) + databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, temp_str[ndx]); + for (int ndx = arraylen; ndx < MAXSTRINGSIZE; ndx++) + databuf.put(dbposition + Sensor_Datatype.getOffset(1) + arraylen, (byte) 0); + databuf.putDouble(dbposition + Sensor_Datatype.getOffset(2), temperature); + databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), pressure); + } + + void readBuffer(ByteBuffer databuf, int dbposition) { + this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0)); + ByteBuffer stringbuf = databuf.duplicate(); + stringbuf.position(dbposition + Sensor_Datatype.getOffset(1)); + stringbuf.limit(dbposition + Sensor_Datatype.getOffset(1) + MAXSTRINGSIZE); + byte[] bytearr = new byte[stringbuf.remaining()]; + stringbuf.get(bytearr); + this.location = new String(bytearr, Charset.forName("UTF-8")).trim(); + this.temperature = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(2)); + this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3)); + } + + @Override + public String toString() { + return String.format("Serial number : " + serial_no + "%n" + + "Location : " + location + "%n" + + "Temperature (F) : " + temperature + "%n" + + "Pressure (inHg) : " + pressure + "%n"); + } + } + + private static void CreateDataset() { + long file_id = -1; + long strtype_id = -1; + long memtype_id = -1; + long filetype_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long[] dims = { DIM0 }; + Sensor[] object_data = new Sensor[DIM0]; + byte[] dset_data = null; + + // Initialize data. + object_data[0] = new Sensor(1153, new String("Exterior (static)"), 53.23, 24.57); + object_data[1] = new Sensor(1184, new String("Intake"), 55.12, 22.95); + object_data[2] = new Sensor(1027, new String("Intake manifold"), 103.55, 31.23); + object_data[3] = new Sensor(1313, new String("Exhaust manifold"), 1252.89, 84.11); + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create string datatype. + try { + strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1); + if (strtype_id >= 0) + H5.H5Tset_size(strtype_id, MAXSTRINGSIZE); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the compound datatype for memory. + try { + memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize()); + if (memtype_id >= 0) { + for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) { + long type_id = Sensor_Datatype.memberMemTypes[indx]; + if (type_id == HDF5Constants.H5T_C_S1) + type_id = strtype_id; + H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx), + type_id); + } + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the compound datatype for the file. Because the standard + // types we are using for the file may have different sizes than + // the corresponding native types, we must manually calculate the + // offset of each member. 
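+        // (For this attribute the raw buffer assembled below packs all DIM0
+        // records back to back: 4 records of 100 bytes each, per getDataSize(),
+        // for the 400 bytes that getTotalDataSize() reports. H5Awrite then
+        // transfers the whole attribute in a single call; attributes have no
+        // partial or hyperslab I/O.)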
+ try { + filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize()); + if (filetype_id >= 0) { + for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) { + long type_id = Sensor_Datatype.memberFileTypes[indx]; + if (type_id == HDF5Constants.H5T_C_S1) + type_id = strtype_id; + H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx), + type_id); + } + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataset with a scalar dataspace. + try { + dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR); + if (dataspace_id >= 0) { + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + H5.H5Sclose(dataspace_id); + dataspace_id = -1; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + dataspace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the attribute. + try { + if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0)) + attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the compound data. + dset_data = new byte[(int) dims[0] * (int)Sensor_Datatype.getDataSize()]; + ByteBuffer outBuf = ByteBuffer.wrap(dset_data); + outBuf.order(ByteOrder.nativeOrder()); + for (int indx = 0; indx < (int) dims[0]; indx++) { + object_data[indx].writeBuffer(outBuf, indx * (int)Sensor_Datatype.getDataSize()); + } + try { + if ((attribute_id >= 0) && (memtype_id >= 0)) + H5.H5Awrite(attribute_id, memtype_id, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the file type. + try { + if (filetype_id >= 0) + H5.H5Tclose(filetype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the mem type. + try { + if (memtype_id >= 0) + H5.H5Tclose(memtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (strtype_id >= 0) + H5.H5Tclose(strtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long strtype_id = -1; + long memtype_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long[] dims = { DIM0 }; + Sensor[] object_data2; + byte[] dset_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. 
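+        // (In the step below, H5Aopen_by_name is called with "." as the object
+        // name, meaning the attribute is looked up on the object identified by
+        // dataset_id itself rather than on a path relative to it.)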
+ try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. This is a + // three dimensional dataset when the array datatype is included so + // the dynamic allocation must be done in steps. + try { + if (attribute_id >= 0) + dataspace_id = H5.H5Aget_space(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create string datatype. + try { + strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1); + if (strtype_id >= 0) + H5.H5Tset_size(strtype_id, MAXSTRINGSIZE); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the compound datatype for memory. + try { + memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize()); + if (memtype_id >= 0) { + for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) { + long type_id = Sensor_Datatype.memberMemTypes[indx]; + if (type_id == HDF5Constants.H5T_C_S1) + type_id = strtype_id; + H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx), + type_id); + } + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // allocate memory for read buffer. + dset_data = new byte[(int) dims[0] * (int)Sensor_Datatype.getDataSize()]; + + object_data2 = new Sensor[(int) dims[0]]; + + // Read data. + try { + if ((attribute_id >= 0) && (memtype_id >= 0)) + H5.H5Aread(attribute_id, memtype_id, dset_data); + + ByteBuffer inBuf = ByteBuffer.wrap(dset_data); + inBuf.order(ByteOrder.nativeOrder()); + for (int indx = 0; indx < (int) dims[0]; indx++) { + object_data2[indx] = new Sensor(inBuf, indx * (int)Sensor_Datatype.getDataSize()); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + for (int indx = 0; indx < dims[0]; indx++) { + System.out.println(ATTRIBUTENAME + " [" + indx + "]:"); + System.out.println(object_data2[indx].toString()); + } + System.out.println(); + + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the mem type. + try { + if (memtype_id >= 0) + H5.H5Tclose(memtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (strtype_id >= 0) + H5.H5Tclose(strtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_CompoundAttribute.CreateDataset(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). 
+        H5Ex_T_CompoundAttribute.ReadDataset();
+    }
+
+}
diff --git a/java/examples/datatypes/H5Ex_T_Float.java b/java/examples/datatypes/H5Ex_T_Float.java new file mode 100644 index 0000000..1b5fd9b --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_Float.java @@ -0,0 +1,227 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF5.  The full HDF5 copyright notice, including     *
+ * terms governing use, modification, and redistribution, is contained in    *
+ * the files COPYING and Copyright.html.  COPYING can be found at the root   *
+ * of the source code distribution tree; Copyright.html can be found at the  *
+ * root level of an installed copy of the electronic HDF5 document set and   *
+ * is linked from the top-level documents page.  It can also be found at     *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html.  If you do not have          *
+ * access to either file, you may request a copy from help@hdfgroup.org.     *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+  This example shows how to read and write floating point
+  datatypes to a dataset.  The program first writes floating
+  point numbers to a dataset with a dataspace of DIM0xDIM1,
+  then closes the file.  Next, it reopens the file, reads
+  back the data, and outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import java.text.DecimalFormat;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_Float {
+    private static String FILENAME = "H5Ex_T_Float.h5";
+    private static String DATASETNAME = "DS1";
+    private static final int DIM0 = 4;
+    private static final int DIM1 = 7;
+    private static final int RANK = 2;
+
+    private static void CreateDataset() {
+        long file_id = -1;
+        long dataspace_id = -1;
+        long dataset_id = -1;
+        long[] dims = { DIM0, DIM1 };
+        double[][] dset_data = new double[DIM0][DIM1];
+
+        // Initialize data.
+        for (int indx = 0; indx < DIM0; indx++)
+            for (int jndx = 0; jndx < DIM1; jndx++) {
+                dset_data[indx][jndx] = indx / (jndx + 0.5) + jndx;
+            }
+
+        // Create a new file using default properties.
+        try {
+            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create dataspace. Setting maximum size to NULL sets the maximum
+        // size to be the current size.
+        try {
+            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the dataset and write the floating point data to it. In
+        // this example we will save the data as 64 bit little endian IEEE
+        // floating point numbers, regardless of the native type. The HDF5
+        // library automatically converts between different floating point
+        // types.
+        try {
+            if ((file_id >= 0) && (dataspace_id >= 0))
+                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_IEEE_F64LE, dataspace_id,
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Write the data to the dataset.
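+        // (A note on the buffer: the Java wrapper accepts the two-dimensional
+        // double[][] array directly and flattens it before handing it to the
+        // native library, which converts the H5T_NATIVE_DOUBLE values in memory
+        // to the dataset's little-endian IEEE 64-bit layout as they are
+        // written.)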
+ try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM0, DIM1 }; + double[][] dset_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. + try { + if (dataset_id >= 0) + dataspace_id = H5.H5Dget_space(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate array of pointers to two-dimensional arrays (the + // elements of the dataset. + dset_data = new double[(int) dims[0]][(int) (dims[1])]; + + // Read data. + try { + if (dataset_id >= 0) + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + DecimalFormat df = new DecimalFormat("#,##0.0000"); + System.out.println(DATASETNAME + ":"); + for (int indx = 0; indx < dims[0]; indx++) { + System.out.print(" ["); + for (int jndx = 0; jndx < dims[1]; jndx++) { + System.out.print(" " + df.format(dset_data[indx][jndx])); + } + System.out.println("]"); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_Float.CreateDataset(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). + H5Ex_T_Float.ReadDataset(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_FloatAttribute.java b/java/examples/datatypes/H5Ex_T_FloatAttribute.java new file mode 100644 index 0000000..de1dac7 --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_FloatAttribute.java @@ -0,0 +1,263 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. 
* + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write floating point + datatypes to an attribute. The program first writes + floating point numbers to an attribute with a dataspace of + DIM0xDIM1, then closes the file. Next, it reopens the + file, reads back the data, and outputs it to the screen. + ************************************************************/ + +package examples.datatypes; + +import java.text.DecimalFormat; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_T_FloatAttribute { + private static String FILENAME = "H5Ex_T_FloatAttribute.h5"; + private static String DATASETNAME = "DS1"; + private static String ATTRIBUTENAME = "A1"; + private static final int DIM0 = 4; + private static final int DIM1 = 7; + private static final int RANK = 2; + + private static void CreateDataset() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long[] dims = { DIM0, DIM1 }; + double[][] dset_data = new double[DIM0][DIM1]; + + // Initialize data. + for (int indx = 0; indx < DIM0; indx++) + for (int jndx = 0; jndx < DIM1; jndx++) { + dset_data[indx][jndx] = indx / (jndx + 0.5) + jndx; + } + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataset with a scalar dataspace. + try { + dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR); + if (dataspace_id >= 0) { + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + H5.H5Sclose(dataspace_id); + dataspace_id = -1; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + dataspace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the attribute and write the array data to it. + try { + if ((dataset_id >= 0) && (dataspace_id >= 0)) + attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_IEEE_F64LE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the dataset. + try { + if (attribute_id >= 0) + H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_DOUBLE, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. 
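+        // (Each identifier is closed under an "id >= 0" guard so that cleanup
+        // still proceeds even when an earlier call failed and left that
+        // identifier at its -1 sentinel; the same pattern is used throughout
+        // these examples.)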
+ try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long[] dims = { DIM0, DIM1 }; + double[][] dset_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. + try { + if (attribute_id >= 0) + dataspace_id = H5.H5Aget_space(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate array of pointers to two-dimensional arrays (the + // elements of the dataset. + dset_data = new double[(int) dims[0]][(int) (dims[1])]; + + // Read data. + try { + if (attribute_id >= 0) + H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_DOUBLE, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + DecimalFormat df = new DecimalFormat("#,##0.0000"); + System.out.println(ATTRIBUTENAME + ":"); + for (int indx = 0; indx < dims[0]; indx++) { + System.out.print(" ["); + for (int jndx = 0; jndx < dims[1]; jndx++) { + System.out.print(" " + df.format(dset_data[indx][jndx])); + } + System.out.println("]"); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_FloatAttribute.CreateDataset(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). 
+ H5Ex_T_FloatAttribute.ReadDataset(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_Integer.java b/java/examples/datatypes/H5Ex_T_Integer.java new file mode 100644 index 0000000..2f365cd --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_Integer.java @@ -0,0 +1,226 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write integer datatypes + to a dataset. The program first writes integers to a + dataset with a dataspace of DIM0xDIM1, then closes the + file. Next, it reopens the file, reads back the data, and + outputs it to the screen. + ************************************************************/ + +package examples.datatypes; + +import java.text.DecimalFormat; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_T_Integer { + private static String FILENAME = "H5Ex_T_Integer.h5"; + private static String DATASETNAME = "DS1"; + private static final int DIM0 = 4; + private static final int DIM1 = 7; + private static final int RANK = 2; + + private static void CreateDataset() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM0, DIM1 }; + int[][] dset_data = new int[DIM0][DIM1]; + + // Initialize data. + for (int indx = 0; indx < DIM0; indx++) + for (int jndx = 0; jndx < DIM1; jndx++) { + dset_data[indx][jndx] = indx * jndx - jndx; + } + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + dataspace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset and write the integer data to it. In this + // example we will save the data as 64 bit big endian integers, + // regardless of the native integer type. The HDF5 library + // automatically converts between different integer types. + try { + if ((file_id >= 0) && (dataspace_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I64BE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the data to the dataset. 
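+        // (Here the memory buffer holds 4-byte native ints while the dataset
+        // was created as H5T_STD_I64BE, so the library widens each value to the
+        // 8-byte big-endian file layout as it is written.)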
+ try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM0, DIM1 }; + int[][] dset_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. + try { + if (dataset_id >= 0) + dataspace_id = H5.H5Dget_space(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate array of pointers to two-dimensional arrays (the + // elements of the dataset. + dset_data = new int[(int) dims[0]][(int) (dims[1])]; + + // Read data. + try { + if (dataset_id >= 0) + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + DecimalFormat df = new DecimalFormat("#,##0"); + System.out.println(DATASETNAME + ":"); + for (int indx = 0; indx < dims[0]; indx++) { + System.out.print(" ["); + for (int jndx = 0; jndx < dims[1]; jndx++) { + System.out.print(" " + df.format(dset_data[indx][jndx])); + } + System.out.println("]"); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_Integer.CreateDataset(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). + H5Ex_T_Integer.ReadDataset(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_IntegerAttribute.java b/java/examples/datatypes/H5Ex_T_IntegerAttribute.java new file mode 100644 index 0000000..c153d99 --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_IntegerAttribute.java @@ -0,0 +1,263 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. 
* + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write integer datatypes + to an attribute. The program first writes integers to an + attribute with a dataspace of DIM0xDIM1, then closes the + file. Next, it reopens the file, reads back the data, and + outputs it to the screen. + ************************************************************/ + +package examples.datatypes; + +import java.text.DecimalFormat; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_T_IntegerAttribute { + private static String FILENAME = "H5Ex_T_IntegerAttribute.h5"; + private static String DATASETNAME = "DS1"; + private static String ATTRIBUTENAME = "A1"; + private static final int DIM0 = 4; + private static final int DIM1 = 7; + private static final int RANK = 2; + + private static void CreateDataset() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long[] dims = { DIM0, DIM1 }; + int[][] dset_data = new int[DIM0][DIM1]; + + // Initialize data. + for (int indx = 0; indx < DIM0; indx++) + for (int jndx = 0; jndx < DIM1; jndx++) { + dset_data[indx][jndx] = indx * jndx - jndx; + } + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataset with a scalar dataspace. + try { + dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR); + if (dataspace_id >= 0) { + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + H5.H5Sclose(dataspace_id); + dataspace_id = -1; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + dataspace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the attribute and write the array data to it. + try { + if ((dataset_id >= 0) && (dataspace_id >= 0)) + attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_I64BE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the dataset. + try { + if (attribute_id >= 0) + H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_INT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. 
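+        // (Note that DS1 itself is never written: the 32-bit scalar dataset
+        // created above serves only as an attachment point, and all of this
+        // example's data lives in the attribute just written.)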
+ try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long[] dims = { DIM0, DIM1 }; + int[][] dset_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. + try { + if (attribute_id >= 0) + dataspace_id = H5.H5Aget_space(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate array of pointers to two-dimensional arrays (the + // elements of the dataset. + dset_data = new int[(int) dims[0]][(int) (dims[1])]; + + // Read data. + try { + if (attribute_id >= 0) + H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_INT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + DecimalFormat df = new DecimalFormat("#,##0"); + System.out.println(ATTRIBUTENAME + ":"); + for (int indx = 0; indx < dims[0]; indx++) { + System.out.print(" ["); + for (int jndx = 0; jndx < dims[1]; jndx++) { + System.out.print(" " + df.format(dset_data[indx][jndx])); + } + System.out.println("]"); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_IntegerAttribute.CreateDataset(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). 
+ H5Ex_T_IntegerAttribute.ReadDataset(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_ObjectReference.java b/java/examples/datatypes/H5Ex_T_ObjectReference.java new file mode 100644 index 0000000..3cad6c9 --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_ObjectReference.java @@ -0,0 +1,347 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write object references + to a dataset. The program first creates objects in the + file and writes references to those objects to a dataset + with a dataspace of DIM0, then closes the file. Next, it + reopens the file, dereferences the references, and outputs + the names of their targets to the screen. + ************************************************************/ +package examples.datatypes; + +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_T_ObjectReference { + private static String FILENAME = "H5Ex_T_ObjectReference.h5"; + private static String DATASETNAME = "DS1"; + private static String DATASETNAME2 = "DS2"; + private static String GROUPNAME = "G1"; + private static final int DIM0 = 2; + private static final int RANK = 1; + + // Values for the status of space allocation + enum H5G_obj { + H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */ + H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */ + H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */ + H5G_TYPE(HDF5Constants.H5O_TYPE_NAMED_DATATYPE); /* Object is a named data type */ + private static final Map<Integer, H5G_obj> lookup = new HashMap<Integer, H5G_obj>(); + + static { + for (H5G_obj s : EnumSet.allOf(H5G_obj.class)) + lookup.put(s.getCode(), s); + } + + private int code; + + H5G_obj(int layout_type) { + this.code = layout_type; + } + + public int getCode() { + return this.code; + } + + public static H5G_obj get(int code) { + return lookup.get(code); + } + } + + private static void writeObjRef() { + long file_id = -1; + long dataspace_id = -1; + long filespace_id = -1; + long group_id = -1; + long dataset_id = -1; + long[] dims = { DIM0 }; + byte[][] dset_data = new byte[DIM0][8]; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataset with a scalar dataspace. 
+ try { + dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR); + if ((file_id >= 0) && (dataspace_id >= 0)) { + dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I32LE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + dataset_id = -1; + H5.H5Sclose(dataspace_id); + dataspace_id = -1; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create a group in the file. + try { + if (file_id >= 0) + group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + if (group_id >= 0) + H5.H5Gclose(group_id); + group_id = -1; + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create references to the previously created objects. Passing -1 + // as space_id causes this parameter to be ignored. Other values + // besides valid dataspaces result in an error. + try { + if (file_id >= 0) { + byte rbuf0[] = H5.H5Rcreate(file_id, GROUPNAME, HDF5Constants.H5R_OBJECT, -1); + byte rbuf1[] = H5.H5Rcreate(file_id, DATASETNAME2, HDF5Constants.H5R_OBJECT, -1); + for (int indx = 0; indx < 8; indx++) { + dset_data[0][indx] = rbuf0[indx]; + dset_data[1][indx] = rbuf1[indx]; + } + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + filespace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset. + try { + if ((file_id >= 0) && (filespace_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_REF_OBJ, filespace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the object references to it. + try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_STD_REF_OBJ, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (filespace_id >= 0) + H5.H5Sclose(filespace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + private static void readObjRef() { + long file_id = -1; + long dataset_id = -1; + long dataspace_id = -1; + int object_type = -1; + long object_id = -1; + long[] dims = { DIM0 }; + byte[][] dset_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. 
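
The read loop further below re-opens each referenced object from its stored reference. Isolated, that step looks like this; a minimal sketch assuming an open dataset_id and one 8-byte reference from the read buffer, with exception handling omitted:

    // Sketch: dereference one stored object reference.
    byte[] ref = dset_data[0];
    long object_id = H5.H5Rdereference(dataset_id, HDF5Constants.H5P_DEFAULT,
            HDF5Constants.H5R_OBJECT, ref);
    int object_type = H5.H5Rget_obj_type(dataset_id, HDF5Constants.H5R_OBJECT, ref);
    // object_type is an HDF5Constants.H5O_TYPE_* code; the returned handle must
    // be closed with the matching H5Gclose/H5Dclose/H5Tclose call, as the
    // switch statement below does.
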
+        try {
+            if (dataset_id >= 0)
+                dataspace_id = H5.H5Dget_space(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dataspace_id >= 0)
+                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Allocate a two-dimensional array to hold the elements of the dataset.
+        dset_data = new byte[(int)dims[0]][8];
+
+        // Read the data using the default properties.
+        try {
+            if (dataset_id >= 0) {
+                H5.H5Dread(dataset_id, HDF5Constants.H5T_STD_REF_OBJ, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                        HDF5Constants.H5P_DEFAULT, dset_data);
+            }
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Output the data to the screen.
+        for (int indx = 0; indx < dims[0]; indx++) {
+            System.out.println(DATASETNAME + "[" + indx + "]:");
+            System.out.print(" ->");
+            // Open the referenced object, get its name and type.
+            try {
+                if (dataset_id >= 0) {
+                    object_id = H5.H5Rdereference(dataset_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, dset_data[indx]);
+                    object_type = H5.H5Rget_obj_type(dataset_id, HDF5Constants.H5R_OBJECT, dset_data[indx]);
+                }
+                String[] obj_name = new String[1];
+                long name_size = 1;
+                if (object_type >= 0) {
+                    // Get the length of the name and retrieve the name.
+                    name_size = 1 + H5.H5Iget_name(object_id, obj_name, name_size);
+                }
+                if ((object_id >= 0) && (object_type >= -1)) {
+                    switch (H5G_obj.get(object_type)) {
+                    case H5G_GROUP:
+                        System.out.print("H5G_GROUP");
+                        try {
+                            if (object_id >= 0)
+                                H5.H5Gclose(object_id);
+                        }
+                        catch (Exception e) {
+                            e.printStackTrace();
+                        }
+                        break;
+                    case H5G_DATASET:
+                        System.out.print("H5G_DATASET");
+                        try {
+                            if (object_id >= 0)
+                                H5.H5Dclose(object_id);
+                        }
+                        catch (Exception e) {
+                            e.printStackTrace();
+                        }
+                        break;
+                    case H5G_TYPE:
+                        System.out.print("H5G_TYPE");
+                        try {
+                            if (object_id >= 0)
+                                H5.H5Tclose(object_id);
+                        }
+                        catch (Exception e) {
+                            e.printStackTrace();
+                        }
+                        break;
+                    default:
+                        System.out.print("UNHANDLED");
+                    }
+                }
+                // Print the name.
+                if (name_size > 1)
+                    System.out.println(": " + obj_name[0]);
+            }
+            catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        // End access to the dataset and release resources used by it.
+        try {
+            if (dataspace_id >= 0)
+                H5.H5Sclose(dataspace_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        try {
+            if (dataset_id >= 0)
+                H5.H5Dclose(dataset_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Close the file.
+        try {
+            if (file_id >= 0)
+                H5.H5Fclose(file_id);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    public static void main(String[] args) {
+        // First write references to a group and a dataset into a new file,
+        // then read the references back and identify the type and name of
+        // each object they point to.
+        H5Ex_T_ObjectReference.writeObjRef();
+        H5Ex_T_ObjectReference.readObjRef();
+    }
+
+}
diff --git a/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java b/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java
new file mode 100644
index 0000000..e354029
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java
@@ -0,0 +1,389 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved.
* + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write object references + to an attribute. The program first creates objects in the + file and writes references to those objects to an + attribute with a dataspace of DIM0, then closes the file. + Next, it reopens the file, dereferences the references, + and outputs the names of their targets to the screen. + ************************************************************/ + +package examples.datatypes; + +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_T_ObjectReferenceAttribute { + private static String FILENAME = "H5Ex_T_ObjectReferenceAttribute.h5"; + private static String DATASETNAME = "DS1"; + private static String ATTRIBUTENAME = "A1"; + private static String DATASETNAME2 = "DS2"; + private static String GROUPNAME = "G1"; + private static final int DIM0 = 2; + private static final int RANK = 1; + + // Values for the status of space allocation + enum H5G_obj { + H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */ + H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */ + H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */ + H5G_TYPE(HDF5Constants.H5O_TYPE_NAMED_DATATYPE); /* Object is a named data type */ + private static final Map<Integer, H5G_obj> lookup = new HashMap<Integer, H5G_obj>(); + + static { + for (H5G_obj s : EnumSet.allOf(H5G_obj.class)) + lookup.put(s.getCode(), s); + } + + private int code; + + H5G_obj(int layout_type) { + this.code = layout_type; + } + + public int getCode() { + return this.code; + } + + public static H5G_obj get(int code) { + return lookup.get(code); + } + } + + private static void CreateDataset() { + long file_id = -1; + long dataspace_id = -1; + long group_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long[] dims = { DIM0 }; + byte[][] dset_data = new byte[DIM0][8]; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataset with a scalar dataspace. + try { + dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR); + if (dataspace_id >= 0) { + dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I32LE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + dataset_id = -1; + H5.H5Sclose(dataspace_id); + dataspace_id = -1; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create a group in the file. 
+ try { + if (file_id >= 0) + group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + if (group_id >= 0) + H5.H5Gclose(group_id); + group_id = -1; + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create references to the previously created objects. Passing -1 + // as space_id causes this parameter to be ignored. Other values + // besides valid dataspaces result in an error. + try { + if (file_id >= 0) { + byte rbuf0[] = H5.H5Rcreate(file_id, GROUPNAME, HDF5Constants.H5R_OBJECT, -1); + byte rbuf1[] = H5.H5Rcreate(file_id, DATASETNAME2, HDF5Constants.H5R_OBJECT, -1); + for (int indx = 0; indx < 8; indx++) { + dset_data[0][indx] = rbuf0[indx]; + dset_data[1][indx] = rbuf1[indx]; + } + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataset with a scalar dataspace to serve as the parent + // for the attribute. + try { + dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR); + if (dataspace_id >= 0) { + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + H5.H5Sclose(dataspace_id); + dataspace_id = -1; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + dataspace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the attribute and write the array data to it. + try { + if ((dataset_id >= 0) && (dataspace_id >= 0)) + attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_REF_OBJ, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the dataset. + try { + if (attribute_id >= 0) + H5.H5Awrite(attribute_id, HDF5Constants.H5T_STD_REF_OBJ, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long attribute_id = -1; + int object_type = -1; + long object_id = -1; + long[] dims = { DIM0 }; + byte[][] dset_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. 
+ try { + if (attribute_id >= 0) + dataspace_id = H5.H5Aget_space(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate array of pointers to two-dimensional arrays (the + // elements of the dataset. + dset_data = new byte[(int) dims[0]][8]; + + // Read data. + try { + if (attribute_id >= 0) + H5.H5Aread(attribute_id, HDF5Constants.H5T_STD_REF_OBJ, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + for (int indx = 0; indx < dims[0]; indx++) { + System.out.println(ATTRIBUTENAME + "[" + indx + "]:"); + System.out.print(" ->"); + // Open the referenced object, get its name and type. + try { + if (dataset_id >= 0) { + object_id = H5.H5Rdereference(dataset_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, dset_data[indx]); + object_type = H5.H5Rget_obj_type(dataset_id, HDF5Constants.H5R_OBJECT, dset_data[indx]); + } + String[] obj_name = new String[1]; + long name_size = 1; + if (object_type >= 0) { + // Get the length of the name and retrieve the name. + name_size = 1 + H5.H5Iget_name(object_id, obj_name, name_size); + } + if ((object_id >= 0) && (object_type >= -1)) { + switch (H5G_obj.get(object_type)) { + case H5G_GROUP: + System.out.print("H5G_GROUP"); + try { + if (object_id >= 0) + H5.H5Gclose(object_id); + } + catch (Exception e) { + e.printStackTrace(); + } + break; + case H5G_DATASET: + System.out.print("H5G_DATASET"); + try { + if (object_id >= 0) + H5.H5Dclose(object_id); + } + catch (Exception e) { + e.printStackTrace(); + } + break; + case H5G_TYPE: + System.out.print("H5G_TYPE"); + try { + if (object_id >= 0) + H5.H5Tclose(object_id); + } + catch (Exception e) { + e.printStackTrace(); + } + break; + default: + System.out.print("UNHANDLED"); + } + } + // Print the name. + if (name_size > 1) + System.out.println(": " + obj_name[0]); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + // End access to the dataset and release resources used by it. + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_ObjectReferenceAttribute.CreateDataset(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). + H5Ex_T_ObjectReferenceAttribute.ReadDataset(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_Opaque.java b/java/examples/datatypes/H5Ex_T_Opaque.java new file mode 100644 index 0000000..eb45686 --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_Opaque.java @@ -0,0 +1,270 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. 
The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write opaque datatypes
+ to a dataset. The program first writes opaque data to a
+ dataset with a dataspace of DIM0, then closes the file.
+ Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_Opaque {
+    private static String FILENAME = "H5Ex_T_Opaque.h5";
+    private static String DATASETNAME = "DS1";
+    private static final int DIM0 = 4;
+    private static final int LEN = 7;
+    private static final int RANK = 1;
+
+    private static void CreateDataset() {
+        long file_id = -1;
+        long dataspace_id = -1;
+        long datatype_id = -1;
+        long dataset_id = -1;
+        long[] dims = { DIM0 };
+        byte[] dset_data = new byte[DIM0 * LEN];
+        byte[] str_data = { 'O', 'P', 'A', 'Q', 'U', 'E' };
+
+        // Initialize data.
+        for (int indx = 0; indx < DIM0; indx++) {
+            for (int jndx = 0; jndx < LEN - 1; jndx++)
+                dset_data[jndx + indx * LEN] = str_data[jndx];
+            dset_data[LEN - 1 + indx * LEN] = (byte) (indx + '0');
+        }
+
+        // Create a new file using default properties.
+        try {
+            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+                    HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create opaque datatype and set the tag to something appropriate.
+        // For this example we will write and view the data as a character
+        // array.
+        try {
+            datatype_id = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, (long)LEN);
+            if (datatype_id >= 0)
+                H5.H5Tset_tag(datatype_id, "Character array");
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create dataspace. Setting maximum size to NULL sets the maximum
+        // size to be the current size.
+        try {
+            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Create the dataset using the opaque datatype and the dataspace
+        // created above.
+        try {
+            if ((file_id >= 0) && (datatype_id >= 0) && (dataspace_id >= 0))
+                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, datatype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Write the opaque data to the dataset.
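
Before that write happens, note the buffer layout: each element is a LEN-byte row stored back to back in one flat byte array. In outline, the packing performed by the initialization loop above is (a sketch using the same names as the example):

    // Sketch: pack DIM0 opaque elements of LEN bytes each into a flat buffer.
    byte[] buf = new byte[DIM0 * LEN];
    for (int i = 0; i < DIM0; i++) {
        for (int j = 0; j < LEN - 1; j++)
            buf[i * LEN + j] = str_data[j];          // shared "OPAQUE" prefix
        buf[i * LEN + LEN - 1] = (byte) (i + '0');   // per-element digit suffix
    }
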
+ try { + if ((dataset_id >= 0) && (datatype_id >= 0)) + H5.H5Dwrite(dataset_id, datatype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (datatype_id >= 0) + H5.H5Tclose(datatype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long datatype_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long type_len = -1; + long[] dims = { DIM0 }; + byte[] dset_data; + String tag_name = null; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get datatype and properties for the datatype. + try { + if (dataset_id >= 0) + datatype_id = H5.H5Dget_type(dataset_id); + if (datatype_id >= 0) { + type_len = H5.H5Tget_size(datatype_id); + tag_name = H5.H5Tget_tag(datatype_id); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. + try { + if (dataset_id >= 0) + dataspace_id = H5.H5Dget_space(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate buffer. + dset_data = new byte[(int) (dims[0] * type_len)]; + + // Read data. + try { + if ((dataset_id >= 0) && (datatype_id >= 0)) + H5.H5Dread(dataset_id, datatype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + System.out.println("Datatype tag for " + DATASETNAME + " is: \"" + tag_name + "\""); + for (int indx = 0; indx < dims[0]; indx++) { + System.out.print(DATASETNAME + "[" + indx + "]: "); + for (int jndx = 0; jndx < type_len; jndx++) { + char temp = (char) dset_data[jndx + indx * (int)type_len]; + System.out.print(temp); + } + System.out.println(""); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (datatype_id >= 0) + H5.H5Tclose(datatype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_Opaque.CreateDataset(); + // Now we begin the read section of this example. 
Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). + H5Ex_T_Opaque.ReadDataset(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java b/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java new file mode 100644 index 0000000..e42bfe8 --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java @@ -0,0 +1,307 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write opaque datatypes + to an attribute. The program first writes opaque data to + an attribute with a dataspace of DIM0, then closes the + file. Next, it reopens the file, reads back the data, and + outputs it to the screen. + ************************************************************/ + +package examples.datatypes; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_T_OpaqueAttribute { + private static String FILENAME = "H5Ex_T_OpaqueAttribute.h5"; + private static String DATASETNAME = "DS1"; + private static String ATTRIBUTENAME = "A1"; + private static final int DIM0 = 4; + private static final int LEN = 7; + private static final int RANK = 1; + + private static void CreateDataset() { + long file_id = -1; + long dataspace_id = -1; + long datatype_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long[] dims = { DIM0 }; + byte[] dset_data = new byte[DIM0 * LEN]; + byte[] str_data = { 'O', 'P', 'A', 'Q', 'U', 'E' }; + + // Initialize data. + for (int indx = 0; indx < DIM0; indx++) { + for (int jndx = 0; jndx < LEN - 1; jndx++) + dset_data[jndx + indx * LEN] = str_data[jndx]; + dset_data[LEN - 1 + indx * LEN] = (byte) (indx + '0'); + } + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataset with a scalar dataspace. + try { + dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR); + if (dataspace_id >= 0) { + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + H5.H5Sclose(dataspace_id); + dataspace_id = -1; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create opaque datatype and set the tag to something appropriate. + // For this example we will write and view the data as a character + // array. 
+ try { + datatype_id = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, (long)LEN); + if (datatype_id >= 0) + H5.H5Tset_tag(datatype_id, "Character array"); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + dataspace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the attribute and write the array data to it. + try { + if ((dataset_id >= 0) && (datatype_id >= 0) && (dataspace_id >= 0)) + attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, datatype_id, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the dataset. + try { + if ((attribute_id >= 0) && (datatype_id >= 0)) + H5.H5Awrite(attribute_id, datatype_id, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (datatype_id >= 0) + H5.H5Tclose(datatype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long datatype_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long type_len = -1; + long[] dims = { DIM0 }; + byte[] dset_data; + String tag_name = null; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get datatype and properties for the datatype. + try { + if (attribute_id >= 0) + datatype_id = H5.H5Aget_type(attribute_id); + if (datatype_id >= 0) { + type_len = H5.H5Tget_size(datatype_id); + tag_name = H5.H5Tget_tag(datatype_id); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. + try { + if (attribute_id >= 0) + dataspace_id = H5.H5Aget_space(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate buffer. + dset_data = new byte[(int) (dims[0] * type_len)]; + + // Read data. + try { + if ((attribute_id >= 0) && (datatype_id >= 0)) + H5.H5Aread(attribute_id, datatype_id, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. 
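
Since the tag declares the data to be a character array, displaying it is a plain byte-to-char reinterpretation. For one element at index indx it reduces to this sketch (type_len and dset_data as read back above):

    // Sketch: print one opaque element as characters.
    for (int jndx = 0; jndx < (int) type_len; jndx++)
        System.out.print((char) dset_data[indx * (int) type_len + jndx]);
    System.out.println();
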
+ System.out.println("Datatype tag for " + ATTRIBUTENAME + " is: \"" + tag_name + "\""); + for (int indx = 0; indx < dims[0]; indx++) { + System.out.print(ATTRIBUTENAME + "[" + indx + "]: "); + for (int jndx = 0; jndx < type_len; jndx++) { + char temp = (char) dset_data[jndx + indx * (int)type_len]; + System.out.print(temp); + } + System.out.println(""); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (datatype_id >= 0) + H5.H5Tclose(datatype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_OpaqueAttribute.CreateDataset(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). + H5Ex_T_OpaqueAttribute.ReadDataset(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_String.java b/java/examples/datatypes/H5Ex_T_String.java new file mode 100644 index 0000000..469172d --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_String.java @@ -0,0 +1,311 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write string datatypes + to a dataset. The program first writes strings to a + dataset with a dataspace of DIM0, then closes the file. + Next, it reopens the file, reads back the data, and + outputs it to the screen. 
+ ************************************************************/ + +package examples.datatypes; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_T_String { + private static String FILENAME = "H5Ex_T_String.h5"; + private static String DATASETNAME = "DS1"; + private static final int DIM0 = 4; + private static final int SDIM = 8; + private static final int RANK = 1; + + private static void CreateDataset() { + long file_id = -1; + long memtype_id = -1; + long filetype_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM0 }; + byte[][] dset_data = new byte[DIM0][SDIM]; + StringBuffer[] str_data = { new StringBuffer("Parting"), new StringBuffer("is such"), + new StringBuffer("sweet"), new StringBuffer("sorrow.") }; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create file and memory datatypes. For this example we will save + // the strings as FORTRAN strings, therefore they do not need space + // for the null terminator in the file. + try { + filetype_id = H5.H5Tcopy(HDF5Constants.H5T_FORTRAN_S1); + if (filetype_id >= 0) + H5.H5Tset_size(filetype_id, SDIM - 1); + } + catch (Exception e) { + e.printStackTrace(); + } + try { + memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1); + if (memtype_id >= 0) + H5.H5Tset_size(memtype_id, SDIM); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + dataspace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset and write the string data to it. + try { + if ((file_id >= 0) && (filetype_id >= 0) && (dataspace_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the data to the dataset. + try { + for (int indx = 0; indx < DIM0; indx++) { + for (int jndx = 0; jndx < SDIM; jndx++) { + if (jndx < str_data[indx].length()) + dset_data[indx][jndx] = (byte) str_data[indx].charAt(jndx); + else + dset_data[indx][jndx] = 0; + } + } + if ((dataset_id >= 0) && (memtype_id >= 0)) + H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the file type. + try { + if (filetype_id >= 0) + H5.H5Tclose(filetype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the mem type. + try { + if (memtype_id >= 0) + H5.H5Tclose(memtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. 
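
The write loop above amounts to a fixed-width, zero-padded encoding of each string. A standalone sketch of that conversion, with a hypothetical helper name and assuming 1-byte characters as in the example:

    // Sketch: encode a string as a zero-padded row of sdim bytes.
    static byte[] toFixedWidth(String s, int sdim) {
        byte[] row = new byte[sdim];                 // zero-filled by default
        for (int j = 0; j < Math.min(s.length(), sdim); j++)
            row[j] = (byte) s.charAt(j);
        return row;
    }
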
+ try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long filetype_id = -1; + long memtype_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long sdim = 0; + long[] dims = { DIM0 }; + byte[][] dset_data; + StringBuffer[] str_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get the datatype and its size. + try { + if (dataset_id >= 0) + filetype_id = H5.H5Dget_type(dataset_id); + if (filetype_id >= 0) { + sdim = H5.H5Tget_size(filetype_id); + sdim++; // Make room for null terminator + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. + try { + if (dataset_id >= 0) + dataspace_id = H5.H5Dget_space(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate space for data. + dset_data = new byte[(int) dims[0]][(int)sdim]; + str_data = new StringBuffer[(int) dims[0]]; + + // Create the memory datatype. + try { + memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1); + if (memtype_id >= 0) + H5.H5Tset_size(memtype_id, sdim); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Read data. + try { + if ((dataset_id >= 0) && (memtype_id >= 0)) + H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + byte[] tempbuf = new byte[(int)sdim]; + for (int indx = 0; indx < (int) dims[0]; indx++) { + for (int jndx = 0; jndx < sdim; jndx++) { + tempbuf[jndx] = dset_data[indx][jndx]; + } + str_data[indx] = new StringBuffer(new String(tempbuf).trim()); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + for (int indx = 0; indx < dims[0]; indx++) { + System.out.println(DATASETNAME + " [" + indx + "]: " + str_data[indx]); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the file type. + try { + if (filetype_id >= 0) + H5.H5Tclose(filetype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the mem type. + try { + if (memtype_id >= 0) + H5.H5Tclose(memtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_String.CreateDataset(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). 
+ H5Ex_T_String.ReadDataset(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_StringAttribute.java b/java/examples/datatypes/H5Ex_T_StringAttribute.java new file mode 100644 index 0000000..49361bc --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_StringAttribute.java @@ -0,0 +1,351 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to read and write string datatypes + to an attribute. The program first writes strings to an + attribute with a dataspace of DIM0, then closes the file. + Next, it reopens the file, reads back the data, and + outputs it to the screen. + ************************************************************/ + +package examples.datatypes; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_T_StringAttribute { + private static String FILENAME = "H5Ex_T_StringAttribute.h5"; + private static String DATASETNAME = "DS1"; + private static String ATTRIBUTENAME = "A1"; + private static final int DIM0 = 4; + private static final int SDIM = 8; + private static final int RANK = 1; + + private static void CreateDataset() { + long file_id = -1; + long memtype_id = -1; + long filetype_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long[] dims = { DIM0 }; + byte[][] dset_data = new byte[DIM0][SDIM]; + StringBuffer[] str_data = { new StringBuffer("Parting"), new StringBuffer("is such"), + new StringBuffer("sweet"), new StringBuffer("sorrow.") }; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create file and memory datatypes. For this example we will save + // the strings as FORTRAN strings, therefore they do not need space + // for the null terminator in the file. + try { + filetype_id = H5.H5Tcopy(HDF5Constants.H5T_FORTRAN_S1); + if (filetype_id >= 0) + H5.H5Tset_size(filetype_id, SDIM - 1); + } + catch (Exception e) { + e.printStackTrace(); + } + try { + memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1); + if (memtype_id >= 0) + H5.H5Tset_size(memtype_id, SDIM); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataset with a scalar dataspace. 
+ try { + dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR); + if (dataspace_id >= 0) { + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + H5.H5Sclose(dataspace_id); + dataspace_id = -1; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + dataspace_id = H5.H5Screate_simple(RANK, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the attribute. + try { + if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0)) + attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the data to the dataset. + try { + for (int indx = 0; indx < DIM0; indx++) { + for (int jndx = 0; jndx < SDIM; jndx++) { + if (jndx < str_data[indx].length()) + dset_data[indx][jndx] = (byte) str_data[indx].charAt(jndx); + else + dset_data[indx][jndx] = 0; + } + } + if ((attribute_id >= 0) && (memtype_id >= 0)) + H5.H5Awrite(attribute_id, memtype_id, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the file type. + try { + if (filetype_id >= 0) + H5.H5Tclose(filetype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the mem type. + try { + if (memtype_id >= 0) + H5.H5Tclose(memtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + private static void ReadDataset() { + long file_id = -1; + long filetype_id = -1; + long memtype_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long sdim = 0; + long[] dims = { DIM0 }; + byte[][] dset_data; + StringBuffer[] str_data; + + // Open an existing file. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing dataset. + try { + if (file_id >= 0) + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get the datatype and its size. + try { + if (attribute_id >= 0) + filetype_id = H5.H5Aget_type(attribute_id); + if (filetype_id >= 0) { + sdim = H5.H5Tget_size(filetype_id); + sdim++; // Make room for null terminator + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Get dataspace and allocate memory for read buffer. 
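
After the raw bytes are read back below, turning each padded row into a Java string is the inverse, essentially one-line step; trim() strips the trailing zero padding (sketch):

    // Sketch: decode one zero-padded row back into a trimmed string.
    String value = new String(dset_data[indx]).trim();
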
+ try { + if (attribute_id >= 0) + dataspace_id = H5.H5Aget_space(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataspace_id >= 0) + H5.H5Sget_simple_extent_dims(dataspace_id, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Allocate space for data. + dset_data = new byte[(int) dims[0]][(int)sdim]; + str_data = new StringBuffer[(int) dims[0]]; + + // Create the memory datatype. + try { + memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1); + if (memtype_id >= 0) + H5.H5Tset_size(memtype_id, sdim); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Read data. + try { + if ((attribute_id >= 0) && (memtype_id >= 0)) + H5.H5Aread(attribute_id, memtype_id, dset_data); + byte[] tempbuf = new byte[(int)sdim]; + for (int indx = 0; indx < (int) dims[0]; indx++) { + for (int jndx = 0; jndx < sdim; jndx++) { + tempbuf[jndx] = dset_data[indx][jndx]; + } + str_data[indx] = new StringBuffer(new String(tempbuf).trim()); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Output the data to the screen. + for (int indx = 0; indx < dims[0]; indx++) { + System.out.println(DATASETNAME + " [" + indx + "]: " + str_data[indx]); + } + System.out.println(); + + // End access to the dataset and release resources used by it. + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the file type. + try { + if (filetype_id >= 0) + H5.H5Tclose(filetype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the mem type. + try { + if (memtype_id >= 0) + H5.H5Tclose(memtype_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_T_StringAttribute.CreateDataset(); + // Now we begin the read section of this example. Here we assume + // the dataset and array have the same name and rank, but can have + // any size. Therefore we must allocate a new array to read in + // data using malloc(). + H5Ex_T_StringAttribute.ReadDataset(); + } + +} diff --git a/java/examples/datatypes/H5Ex_T_VLString.java b/java/examples/datatypes/H5Ex_T_VLString.java new file mode 100644 index 0000000..c8892ba --- /dev/null +++ b/java/examples/datatypes/H5Ex_T_VLString.java @@ -0,0 +1,138 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. 
If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + Creating and writing a VL string to a file. + ************************************************************/ + +package examples.datatypes; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_T_VLString +{ + private static String FILENAME = "H5Ex_T_VLString.h5"; + private static String DATASETNAME = "DS1"; + + private static void createDataset() { + long file_id = -1; + long type_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + int rank = 1; + String[] str_data = { "Parting", "is such", "sweet", "sorrow." }; + long[] dims = { str_data.length }; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + type_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1); + H5.H5Tset_size(type_id, HDF5Constants.H5T_VARIABLE); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create dataspace. Setting maximum size to NULL sets the maximum + // size to be the current size. + try { + dataspace_id = H5.H5Screate_simple(rank, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset and write the string data to it. + try { + if ((file_id >= 0) && (type_id >= 0) && (dataspace_id >= 0)) { + dataset_id = H5.H5Dcreate(file_id, DATASETNAME, type_id, dataspace_id, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the data to the dataset. + try { + if (dataset_id >= 0) + H5.H5Dwrite_VLStrings(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, str_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + H5.H5Sclose(dataspace_id); + H5.H5Tclose(type_id); + H5.H5Dclose(dataset_id); + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + private static void readDataset() { + long file_id = -1; + long type_id = -1; + long dataset_id = -1; + String[] str_data = { "", "", "", "" }; + + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); + type_id = H5.H5Dget_type(dataset_id); + H5.H5Dread_VLStrings(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, + str_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + for (int indx = 0; indx < str_data.length; indx++) + System.out.println(DATASETNAME + " [" + indx + "]: " + str_data[indx]); + + try { + H5.H5Tclose(type_id); + H5.H5Dclose(dataset_id); + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + H5Ex_T_VLString.createDataset(); + H5Ex_T_VLString.readDataset(); + } + +} diff --git a/java/examples/datatypes/Makefile.am b/java/examples/datatypes/Makefile.am new file mode 100644 index 0000000..de2ea9d --- /dev/null +++ b/java/examples/datatypes/Makefile.am @@ -0,0 +1,78 @@ +# +# Copyright by The HDF Group. 
+# Copyright by the Board of Trustees of the University of Illinois. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the files COPYING and Copyright.html. COPYING can be found at the root +# of the source code distribution tree; Copyright.html can be found at the +# root level of an installed copy of the electronic HDF5 document set and +# is linked from the top-level documents page. It can also be found at +# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have +# access to either file, you may request a copy from help@hdfgroup.org. +## +## Makefile.am +## Run automake to generate a Makefile.in from this file. +## +# +# HDF5 Java Library Examples Makefile(.in) + +include $(top_srcdir)/config/commence.am + +# Mark this directory as part of the JNI API +JAVA_API=yes + +JAVAROOT = .classes + +classes: + $(MKDIR_P) $(@D)/$(JAVAROOT) + +pkgpath = examples/datatypes +hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar +CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.5.jar:$$CLASSPATH + +jarfile = jar$(PACKAGE_TARNAME)datatypes.jar + +AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation + +TESTPACKAGE = + +noinst_JAVA = \ + H5Ex_T_Array.java \ + H5Ex_T_ArrayAttribute.java \ + H5Ex_T_Bit.java \ + H5Ex_T_BitAttribute.java \ + H5Ex_T_Commit.java \ + H5Ex_T_Compound.java \ + H5Ex_T_CompoundAttribute.java \ + H5Ex_T_Float.java \ + H5Ex_T_FloatAttribute.java \ + H5Ex_T_Integer.java \ + H5Ex_T_IntegerAttribute.java \ + H5Ex_T_ObjectReference.java \ + H5Ex_T_ObjectReferenceAttribute.java \ + H5Ex_T_Opaque.java \ + H5Ex_T_OpaqueAttribute.java \ + H5Ex_T_String.java \ + H5Ex_T_StringAttribute.java \ + H5Ex_T_VLString.java + +$(jarfile): classnoinst.stamp classes + $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath) + +noinst_DATA = $(jarfile) + +.PHONY: classes + +check_SCRIPTS = runExample.sh +TEST_SCRIPT = $(check_SCRIPTS) + +CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class runExample.sh + +clean: + rm -rf $(JAVAROOT) + rm -f $(jarfile) + rm -f classnoinst.stamp + +include $(top_srcdir)/config/conclude.am diff --git a/java/examples/datatypes/runExample.sh.in b/java/examples/datatypes/runExample.sh.in new file mode 100644 index 0000000..9ed3954 --- /dev/null +++ b/java/examples/datatypes/runExample.sh.in @@ -0,0 +1,408 @@ +#! /bin/sh +# +# Copyright by The HDF Group. +# Copyright by the Board of Trustees of the University of Illinois. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the files COPYING and Copyright.html. COPYING can be found at the root +# of the source code distribution tree; Copyright.html can be found at the +# root level of an installed copy of the electronic HDF5 document set and +# is linked from the top-level documents page. It can also be found at +# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have +# access to either file, you may request a copy from help@hdfgroup.org. +# + +top_builddir=@top_builddir@ +top_srcdir=@top_srcdir@ +srcdir=@srcdir@ + +TESTNAME=EX_Datatypes +EXIT_SUCCESS=0 +EXIT_FAILURE=1 + +# Set up default variable values if not supplied by the user. 
+RM='rm -rf'
+CMP='cmp'
+DIFF='diff -c'
+CP='cp'
+DIRNAME='dirname'
+LS='ls'
+AWK='awk'
+
+nerrors=0
+
+# where the libs exist
+HDFLIB_HOME="$top_srcdir/java/lib"
+BLDLIBDIR="./lib"
+BLDDIR="."
+HDFTEST_HOME="$top_srcdir/java/examples/datatypes"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@datatypes.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# Copy all the library files from the source directory to the test
+# directory.
+# NOTE: Keep this framework to add/remove test files.
+# This list is also used to check that the files exist.
+# A line starting with '#' (no leading space) is treated as a comment.
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$HDFLIB_HOME/slf4j-api-1.7.5.jar
+$HDFLIB_HOME/ext/slf4j-simple-1.7.5.jar
+$top_builddir/src/.libs/libhdf5.*
+$top_builddir/java/src/jni/.libs/libhdf5_java.*
+$top_builddir/java/src/$JARFILE
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Array.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Bit.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_BitAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Commit.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Compound.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Float.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Integer.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_ObjectReference.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Opaque.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_String.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_StringAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_VLString.txt
+"
+
+#
+# copy files from source dirs to test dir
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+ # Copy the test files, using -f to make sure we get a fresh copy.
+ for tstfile in $COPY_LIBFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip the cp if srcdir is the same as destdir; this occurs when
+ # the build/test is performed in the source directory, where the
+ # cp would fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out this exit to CREATE the expected file.
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+ # Skip the rm if srcdir is the same as destdir; this occurs when
+ # the build/test is performed in the source directory, where the
+ # cp would fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDLIBDIR
+ fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+ # Copy the test files, using -f to make sure we get a fresh copy.
+ for tstfile in $COPY_DATAFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip the cp if srcdir is the same as destdir; this occurs when
+ # the build/test is performed in the source directory, where the
+ # cp would fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out this exit to CREATE the expected file.
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+ # Skip the rm if srcdir is the same as destdir; this occurs when
+ # the build/test is performed in the source directory, where the
+ # cp would fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDDIR/*.txt
+ $RM $BLDDIR/*.out
+ fi
+}
+
+# Print a one-line message left justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+JAVAEXEFLAGS=@H5_JAVAFLAGS@
+
+###############################################################################
+# DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-1.7.5.jar:"$BLDLIBDIR"/slf4j-simple-1.7.5.jar:"$TESTJARFILE""
+
+TEST=/usr/bin/test
+if [ !
-x /usr/bin/test ] +then +TEST=`which test` +fi + +if $TEST -z "$CLASSPATH"; then + CLASSPATH="" +fi +CLASSPATH=$CPATH":"$CLASSPATH +export CLASSPATH + +if $TEST -n "$JAVAPATH" ; then + PATH=$JAVAPATH":"$PATH + export PATH +fi + +if $TEST -e /bin/uname; then + os_name=`/bin/uname -s` +elif $TEST -e /usr/bin/uname; then + os_name=`/usr/bin/uname -s` +else + os_name=unknown +fi + +if $TEST -z "$LD_LIBRARY_PATH" ; then + LD_LIBRARY_PATH="" +fi + +case $os_name in + Darwin) + DYLD_LIBRARY_PATH=$BLDLIBDIR:$DYLD_LIBRARY_PATH + export DYLD_LIBRARY_PATH + LD_LIBRARY_PATH=$DYLD_LIBRARY_PATH + ;; + *) + LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH + ;; +esac + +export LD_LIBRARY_PATH + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Array" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Array > H5Ex_T_Array.out) +if diff H5Ex_T_Array.out examples.datatypes.H5Ex_T_Array.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_Array" +else + echo "**FAILED** datatypes.H5Ex_T_Array" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ArrayAttribute" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ArrayAttribute > H5Ex_T_ArrayAttribute.out) +if diff H5Ex_T_ArrayAttribute.out examples.datatypes.H5Ex_T_ArrayAttribute.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_ArrayAttribute" +else + echo "**FAILED** datatypes.H5Ex_T_ArrayAttribute" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Bit" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Bit > H5Ex_T_Bit.out) +if diff H5Ex_T_Bit.out examples.datatypes.H5Ex_T_Bit.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_Bit" +else + echo "**FAILED** datatypes.H5Ex_T_Bit" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_BitAttribute" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_BitAttribute > H5Ex_T_BitAttribute.out) +if diff H5Ex_T_BitAttribute.out examples.datatypes.H5Ex_T_BitAttribute.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_BitAttribute" +else + echo "**FAILED** datatypes.H5Ex_T_BitAttribute" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Commit" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Commit > H5Ex_T_Commit.out) +if diff H5Ex_T_Commit.out examples.datatypes.H5Ex_T_Commit.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_Commit" +else + echo "**FAILED** datatypes.H5Ex_T_Commit" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M 
-Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Compound" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Compound > H5Ex_T_Compound.out) +if diff H5Ex_T_Compound.out examples.datatypes.H5Ex_T_Compound.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_Compound" +else + echo "**FAILED** datatypes.H5Ex_T_Compound" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_CompoundAttribute" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_CompoundAttribute > H5Ex_T_CompoundAttribute.out) +if diff H5Ex_T_CompoundAttribute.out examples.datatypes.H5Ex_T_CompoundAttribute.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_CompoundAttribute" +else + echo "**FAILED** datatypes.H5Ex_T_CompoundAttribute" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Float" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Float > H5Ex_T_Float.out) +if diff H5Ex_T_Float.out examples.datatypes.H5Ex_T_Float.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_Float" +else + echo "**FAILED** datatypes.H5Ex_T_Float" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_FloatAttribute" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_FloatAttribute > H5Ex_T_FloatAttribute.out) +if diff H5Ex_T_FloatAttribute.out examples.datatypes.H5Ex_T_FloatAttribute.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_FloatAttribute" +else + echo "**FAILED** datatypes.H5Ex_T_FloatAttribute" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Integer" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Integer > H5Ex_T_Integer.out) +if diff H5Ex_T_Integer.out examples.datatypes.H5Ex_T_Integer.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_Integer" +else + echo "**FAILED** datatypes.H5Ex_T_Integer" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_IntegerAttribute" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_IntegerAttribute > H5Ex_T_IntegerAttribute.out) +if diff H5Ex_T_IntegerAttribute.out examples.datatypes.H5Ex_T_IntegerAttribute.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_IntegerAttribute" +else + echo "**FAILED** datatypes.H5Ex_T_IntegerAttribute" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M 
-Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReference" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReference > H5Ex_T_ObjectReference.out) +if diff H5Ex_T_ObjectReference.out examples.datatypes.H5Ex_T_ObjectReference.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_ObjectReference" +else + echo "**FAILED** datatypes.H5Ex_T_ObjectReference" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReferenceAttribute" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReferenceAttribute > H5Ex_T_ObjectReferenceAttribute.out) +if diff H5Ex_T_ObjectReferenceAttribute.out examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_ObjectReferenceAttribute" +else + echo "**FAILED** datatypes.H5Ex_T_ObjectReferenceAttribute" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Opaque" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Opaque > H5Ex_T_Opaque.out) +if diff H5Ex_T_Opaque.out examples.datatypes.H5Ex_T_Opaque.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_Opaque" +else + echo "**FAILED** datatypes.H5Ex_T_Opaque" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_OpaqueAttribute" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_OpaqueAttribute > H5Ex_T_OpaqueAttribute.out) +if diff H5Ex_T_OpaqueAttribute.out examples.datatypes.H5Ex_T_OpaqueAttribute.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_OpaqueAttribute" +else + echo "**FAILED** datatypes.H5Ex_T_OpaqueAttribute" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_String" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_String > H5Ex_T_String.out) +if diff H5Ex_T_String.out examples.datatypes.H5Ex_T_String.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_String" +else + echo "**FAILED** datatypes.H5Ex_T_String" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_StringAttribute" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_StringAttribute > H5Ex_T_StringAttribute.out) +if diff H5Ex_T_StringAttribute.out examples.datatypes.H5Ex_T_StringAttribute.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_StringAttribute" +else + echo "**FAILED** datatypes.H5Ex_T_StringAttribute" + nerrors="`expr 
$nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_VLString" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_VLString > H5Ex_T_VLString.out) +if diff H5Ex_T_VLString.out examples.datatypes.H5Ex_T_VLString.txt > /dev/null; then + echo " PASSED datatypes.H5Ex_T_VLString" +else + echo "**FAILED** datatypes.H5Ex_T_VLString" + nerrors="`expr $nerrors + 1`" +fi + +# Clean up temporary files/directories +CLEAN_LIBFILES_AND_BLDLIBDIR +CLEAN_DATAFILES_AND_BLDDIR + +# Report test results and exit +if test $nerrors -eq 0 ; then + echo "All $TESTNAME tests passed." + exit $EXIT_SUCCESS +else + echo "$TESTNAME tests failed with $nerrors errors." + exit $EXIT_FAILURE +fi diff --git a/java/examples/groups/CMakeLists.txt b/java/examples/groups/CMakeLists.txt new file mode 100644 index 0000000..0849f0c --- /dev/null +++ b/java/examples/groups/CMakeLists.txt @@ -0,0 +1,138 @@ +cmake_minimum_required (VERSION 3.1.0) +PROJECT (HDFJAVA_EXAMPLES_GROUPS Java) + +set (CMAKE_VERBOSE_MAKEFILE 1) + +INCLUDE_DIRECTORIES ( + ${HDF5_JAVA_JNI_BINARY_DIR} + ${HDF5_JAVA_HDF5_LIB_DIR} +) + +set (HDF_JAVA_EXAMPLES + H5Ex_G_Create + H5Ex_G_Iterate + H5Ex_G_Compact + H5Ex_G_Corder + H5Ex_G_Intermediate + H5Ex_G_Phase + H5Ex_G_Visit +) + +if (WIN32) + set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";") +else (WIN32) + set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":") +endif (WIN32) + +set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS}") + +set (CMAKE_JAVA_CLASSPATH ".") +foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH}) + set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}") +endforeach (CMAKE_INCLUDE_PATH) + +foreach (example ${HDF_JAVA_EXAMPLES}) + file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt + "Main-Class: examples.groups.${example} +" + ) + add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java) + get_target_property (${example}_JAR_FILE ${example} JAR_FILE) +# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples) + get_target_property (${example}_CLASSPATH ${example} CLASSDIR) + add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET}) +endforeach (example ${HDF_JAVA_EXAMPLES}) + +set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}") + +set (CMAKE_JAVA_CLASSPATH ".") +foreach (HDFJAVA_JAR ${CMAKE_JAVA_INCLUDE_PATH}) + set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${HDFJAVA_JAR}") +endforeach (HDFJAVA_JAR) + +set (HDF_JAVA_TEST_FILES + h5ex_g_iterate.h5 + h5ex_g_visit.h5 +) + +foreach (h5_file ${HDF_JAVA_TEST_FILES}) + set (dest "${PROJECT_BINARY_DIR}/${h5_file}") + #message (STATUS " Copying ${h5_file}") + add_custom_command ( + TARGET H5Ex_G_Visit + POST_BUILD + COMMAND ${CMAKE_COMMAND} + ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/${h5_file} ${dest} + ) +endforeach (h5_file ${HDF_JAVA_TEST_FILES}) + +MACRO (ADD_H5_TEST resultfile resultcode) + add_test ( + NAME JAVA_groups-${resultfile} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}" + -D "TEST_PROGRAM=examples.groups.${resultfile}" + -D "TEST_ARGS:STRING=${ARGN}" + -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${resultfile}_JAR_FILE}" + -D 
"TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" + -D "TEST_FOLDER=${HDFJAVA_EXAMPLES_BINARY_DIR}" + -D "TEST_OUTPUT=groups/${resultfile}.out" + -D "TEST_EXPECT=${resultcode}" + -D "TEST_REFERENCE=groups/${resultfile}.txt" + -P "${HDF_RESOURCES_DIR}/jrunTest.cmake" + ) + if (NOT "${last_test}" STREQUAL "") + set_tests_properties (JAVA_groups-${resultfile} PROPERTIES DEPENDS ${last_test}) + endif (NOT "${last_test}" STREQUAL "") + set (last_test "JAVA_groups-${resultfile}") +ENDMACRO (ADD_H5_TEST file) + +if (BUILD_TESTING) + foreach (example ${HDF_JAVA_EXAMPLES}) + add_test ( + NAME JAVA_groups-${example}-clearall-objects + COMMAND ${CMAKE_COMMAND} + -E remove + ${example}.out + ${example}.out.err + ) + if (NOT "${last_test}" STREQUAL "") + set_tests_properties (JAVA_groups-${example}-clearall-objects PROPERTIES DEPENDS ${last_test}) + endif (NOT "${last_test}" STREQUAL "") + set (last_test "JAVA_groups-${example}-clearall-objects") + if (NOT ${example} STREQUAL "H5Ex_G_Iterate" AND NOT ${example} STREQUAL "H5Ex_G_Visit") + if (${example} STREQUAL "H5Ex_G_Compact") + add_test ( + NAME JAVA_groups-${example}-clearall-h5s + COMMAND ${CMAKE_COMMAND} + -E remove + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}1.h5 + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}2.h5 + ) + else (${example} STREQUAL "H5Ex_G_Compact") + add_test ( + NAME JAVA_groups-${example}-clearall-h5s + COMMAND ${CMAKE_COMMAND} + -E remove + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 + ) + endif (${example} STREQUAL "H5Ex_G_Compact") + if (NOT "${last_test}" STREQUAL "") + set_tests_properties (JAVA_groups-${example}-clearall-h5s PROPERTIES DEPENDS ${last_test}) + endif (NOT "${last_test}" STREQUAL "") + set (last_test "JAVA_groups-${example}-clearall-h5s") + endif (NOT ${example} STREQUAL "H5Ex_G_Iterate" AND NOT ${example} STREQUAL "H5Ex_G_Visit") + add_test ( + NAME JAVA_groups-${example}-copy-objects + COMMAND ${CMAKE_COMMAND} + -E copy_if_different + ${HDFJAVA_EXAMPLES_SOURCE_DIR}/testfiles/examples.groups.${example}.txt + ${HDFJAVA_EXAMPLES_GROUPS_BINARY_DIR}/${example}.txt + ) + if (NOT "${last_test}" STREQUAL "") + set_tests_properties (JAVA_groups-${example}-copy-objects PROPERTIES DEPENDS ${last_test}) + endif (NOT "${last_test}" STREQUAL "") + set (last_test "JAVA_groups-${example}-copy-objects") + ADD_H5_TEST (${example} 0) + endforeach (example ${HDF_JAVA_EXAMPLES}) +endif (BUILD_TESTING) diff --git a/java/examples/groups/H5Ex_G_Compact.java b/java/examples/groups/H5Ex_G_Compact.java new file mode 100644 index 0000000..ca9b6c8 --- /dev/null +++ b/java/examples/groups/H5Ex_G_Compact.java @@ -0,0 +1,266 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. 
*
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ Creating a file and printing the storage layout.
+ ************************************************************/
+
+package examples.groups;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+public class H5Ex_G_Compact {
+
+ private static final String FILE1 = "H5Ex_G_Compact1.h5";
+ private static final String FILE2 = "H5Ex_G_Compact2.h5";
+ private static final String GROUP = "G1";
+
+ enum H5G_storage {
+ H5G_STORAGE_TYPE_UNKNOWN(-1),
+ H5G_STORAGE_TYPE_SYMBOL_TABLE(0),
+ H5G_STORAGE_TYPE_COMPACT(1),
+ H5G_STORAGE_TYPE_DENSE(2);
+
+ private static final Map<Integer, H5G_storage> lookup = new HashMap<Integer, H5G_storage>();
+
+ static {
+ for (H5G_storage s : EnumSet.allOf(H5G_storage.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5G_storage(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5G_storage get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ public static void CreateGroup() {
+ long file_id = -1;
+ long group_id = -1;
+ long fapl_id = -1;
+ H5G_info_t ginfo;
+ long size;
+
+ // Create file 1. This file will use original format groups.
+ try {
+ file_id = H5.H5Fcreate(FILE1, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Create a group in file 1.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, GROUP, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain the group info and print the group storage type.
+ try {
+ if (group_id >= 0) {
+ ginfo = H5.H5Gget_info(group_id);
+ System.out.print("Group storage type for " + FILE1 + " is: ");
+ switch (H5G_storage.get(ginfo.storage_type)) {
+ case H5G_STORAGE_TYPE_COMPACT:
+ System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
+ break;
+ case H5G_STORAGE_TYPE_DENSE:
+ System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format
+ break;
+ case H5G_STORAGE_TYPE_SYMBOL_TABLE:
+ System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format
+ break;
+ case H5G_STORAGE_TYPE_UNKNOWN:
+ System.out.println("H5G_STORAGE_TYPE_UNKNOWN");
+ break;
+ default:
+ System.out.println("Storage Type Invalid");
+ break;
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close file 1.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Re-open file 1. Needed to get the correct file size.
+ try {
+ file_id = H5.H5Fopen(FILE1, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain and print the file size.
+ try {
+ if (file_id >= 0) {
+ size = H5.H5Fget_filesize(file_id);
+ System.out.println("File size for " + FILE1 + " is: " + size + " bytes");
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close FILE1.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set file access property list to allow the latest file format.
+ // This will allow the library to create new compact format groups.
+ try {
+ fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ if (fapl_id >= 0)
+ H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST, HDF5Constants.H5F_LIBVER_LATEST);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ System.out.println();
+ // Create file 2 using the new file access property list.
+ try {
+ file_id = H5.H5Fcreate(FILE2, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, fapl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Create a group in file 2.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, GROUP, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain the group info and print the group storage type.
+ try {
+ if (group_id >= 0) {
+ ginfo = H5.H5Gget_info(group_id);
+ System.out.print("Group storage type for " + FILE2 + " is: ");
+ switch (H5G_storage.get(ginfo.storage_type)) {
+ case H5G_STORAGE_TYPE_COMPACT:
+ System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
+ break;
+ case H5G_STORAGE_TYPE_DENSE:
+ System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format
+ break;
+ case H5G_STORAGE_TYPE_SYMBOL_TABLE:
+ System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format
+ break;
+ case H5G_STORAGE_TYPE_UNKNOWN:
+ System.out.println("H5G_STORAGE_TYPE_UNKNOWN");
+ break;
+ default:
+ System.out.println("Storage Type Invalid");
+ break;
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close file 2.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Re-open file 2. Needed to get the correct file size.
+ try {
+ file_id = H5.H5Fopen(FILE2, HDF5Constants.H5F_ACC_RDONLY, fapl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain and print the file size.
+ try {
+ if (file_id >= 0) {
+ size = H5.H5Fget_filesize(file_id);
+ System.out.println("File size for " + FILE2 + " is: " + size + " bytes");
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close FILE2.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_G_Compact.CreateGroup();
+ }
+} diff --git a/java/examples/groups/H5Ex_G_Corder.java b/java/examples/groups/H5Ex_G_Corder.java new file mode 100644 index 0000000..95790bf --- /dev/null +++ b/java/examples/groups/H5Ex_G_Corder.java @@ -0,0 +1,121 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html.
COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+/************************************************************
+ Creating a file with creation properties and traversing
+ the groups in alphabetical and creation order.
+ ************************************************************/
+
+package examples.groups;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+
+public class H5Ex_G_Corder {
+ private static String FILE = "H5Ex_G_Corder.h5";
+
+ private static void CreateGroup() throws Exception {
+ long file_id = -1;
+ long group_id = -1;
+ long subgroup_id = -1;
+ long gcpl_id = -1;
+ int status;
+ H5G_info_t ginfo;
+ int i;
+ String name;
+
+ try {
+ // Create a new file using default properties.
+ file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Create group creation property list and enable link creation order tracking.
+ gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
+ status = H5.H5Pset_link_creation_order(gcpl_id, HDF5Constants.H5P_CRT_ORDER_TRACKED
+ + HDF5Constants.H5P_CRT_ORDER_INDEXED);
+
+ // Create primary group using the property list.
+ if (status >= 0)
+ group_id = H5.H5Gcreate(file_id, "index_group", HDF5Constants.H5P_DEFAULT, gcpl_id,
+ HDF5Constants.H5P_DEFAULT);
+
+ try {
+ /*
+ * Create subgroups in the primary group. These will be tracked by creation order. Note that these
+ * groups do not have to have the creation order tracking property set.
+ */
+ subgroup_id = H5.H5Gcreate(group_id, "H", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+ subgroup_id = H5.H5Gcreate(group_id, "D", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+ subgroup_id = H5.H5Gcreate(group_id, "F", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+ subgroup_id = H5.H5Gcreate(group_id, "5", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+
+ // Get group info.
+ ginfo = H5.H5Gget_info(group_id);
+
+ // Traverse links in the primary group using alphabetical indices (H5_INDEX_NAME).
+ System.out.println("Traversing group using alphabetical indices:");
+ for (i = 0; i < ginfo.nlinks; i++) {
+ // Retrieve the name of the ith link in a group
+ name = H5.H5Lget_name_by_idx(group_id, ".", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC,
+ i, HDF5Constants.H5P_DEFAULT);
+ System.out.println("Index " + i + ": " + name);
+ }
+
+ // Traverse links in the primary group by creation order (H5_INDEX_CRT_ORDER).
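+ // Note: indexing by creation order works here only because the
+ // primary group was created with a gcpl that enables creation-order
+ // tracking and indexing; without it the lookup below would fail.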
+ System.out.println("Traversing group using creation order indices:"); + for (i = 0; i < ginfo.nlinks; i++) { + // Retrieve the name of the ith link in a group + name = H5.H5Lget_name_by_idx(group_id, ".", HDF5Constants.H5_INDEX_CRT_ORDER, + HDF5Constants.H5_ITER_INC, i, HDF5Constants.H5P_DEFAULT); + System.out.println("Index " + i + ": " + name); + } + + } + catch (Exception e) { + e.printStackTrace(); + } + } + catch (Exception e) { + e.printStackTrace(); + } + finally { + // Close and release resources. + if (gcpl_id >= 0) + H5.H5Pclose(gcpl_id); + if (group_id >= 0) + H5.H5Gclose(group_id); + if (file_id >= 0) + H5.H5Fclose(file_id); + } + } + + public static void main(String[] args) { + try { + H5Ex_G_Corder.CreateGroup(); + } + catch (Exception ex) { + ex.printStackTrace(); + } + } + +} diff --git a/java/examples/groups/H5Ex_G_Create.java b/java/examples/groups/H5Ex_G_Create.java new file mode 100644 index 0000000..1902d86 --- /dev/null +++ b/java/examples/groups/H5Ex_G_Create.java @@ -0,0 +1,94 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to create, open, and close a group. + ************************************************************/ + +package examples.groups; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5Ex_G_Create { + private static String FILENAME = "H5Ex_G_Create.h5"; + private static String GROUPNAME = "G1"; + + private static void CreateGroup() { + long file_id = -1; + long group_id = -1; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create a group in the file. + try { + if (file_id >= 0) + group_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the group. The handle "group" can no longer be used. + try { + if (group_id >= 0) + H5.H5Gclose(group_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Re-open the group, obtaining a new handle. + try { + if (file_id >= 0) + group_id = H5.H5Gopen(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the group. + try { + if (group_id >= 0) + H5.H5Gclose(group_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. 
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_G_Create.CreateGroup();
+ }
+
+} diff --git a/java/examples/groups/H5Ex_G_Intermediate.java b/java/examples/groups/H5Ex_G_Intermediate.java new file mode 100644 index 0000000..a3d620b --- /dev/null +++ b/java/examples/groups/H5Ex_G_Intermediate.java @@ -0,0 +1,125 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create intermediate groups with
+ a single call to H5Gcreate.
+ ************************************************************/
+package examples.groups;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5O_iterate_cb;
+import hdf.hdf5lib.callbacks.H5O_iterate_t;
+import hdf.hdf5lib.structs.H5O_info_t;
+
+import java.util.ArrayList;
+
+public class H5Ex_G_Intermediate {
+
+ private static String FILE = "H5Ex_G_Intermediate.h5";
+
+ private void CreateGroup() throws Exception {
+
+ long file_id = -1;
+ long group_id = -1;
+ long gcpl_id = -1;
+
+ try {
+ // Create a new file using the default properties.
+ file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Create a link creation property list and set it to allow creation of intermediate groups.
+ gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
+ H5.H5Pset_create_intermediate_group(gcpl_id, true);
+
+ /*
+ * Create the group /G1/G2/G3. Note that /G1 and /G1/G2 do not exist yet. This call would cause an error
+ * if we did not use the previously created property list.
+ */
+ group_id = H5.H5Gcreate(file_id, "/G1/G2/G3", gcpl_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ // Print all the objects in the file to show that the intermediate groups have been created.
+ System.out.println("Objects in the file_id:");
+
+ H5O_iterate_t iter_data = new H5O_iter_data();
+ H5O_iterate_cb iter_cb = new H5O_iter_callback();
+
+ H5.H5Ovisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb, iter_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ // Close and release resources.
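+ // Each close below is guarded by a >= 0 handle check so that a
+ // create call that failed above does not trigger a second error here.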
+ if (gcpl_id >= 0) + H5.H5Pclose(gcpl_id); + if (group_id >= 0) + H5.H5Gclose(group_id); + if (file_id >= 0) + H5.H5Fclose(file_id); + } + } + + public static void main(String[] args) { + try { + (new H5Ex_G_Intermediate()).CreateGroup(); + } + catch (Exception ex) { + ex.printStackTrace(); + } + } + + private class idata { + public String link_name = null; + public int link_type = -1; + + idata(String name, int type) { + this.link_name = name; + this.link_type = type; + } + } + + private class H5O_iter_data implements H5O_iterate_t { + public ArrayList<idata> iterdata = new ArrayList<idata>(); + } + + private class H5O_iter_callback implements H5O_iterate_cb { + public int callback(long group, String name, H5O_info_t info, H5O_iterate_t op_data) { + idata id = new idata(name, info.type); + ((H5O_iter_data) op_data).iterdata.add(id); + + System.out.print("/"); /* Print root group in object path */ + + // Check if the current object is the root group, and if not print the full path name and type. + + if (name.charAt(0) == '.') /* Root group, do not print '.' */ + System.out.println(" (Group)"); + else if (info.type == HDF5Constants.H5O_TYPE_GROUP) + System.out.println(name + " (Group)"); + else if (info.type == HDF5Constants.H5O_TYPE_DATASET) + System.out.println(name + " (Dataset)"); + else if (info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) + System.out.println(name + " (Datatype)"); + else + System.out.println(name + " (Unknown)"); + + return 0; + } + } + +} diff --git a/java/examples/groups/H5Ex_G_Iterate.java b/java/examples/groups/H5Ex_G_Iterate.java new file mode 100644 index 0000000..d0b4ea4 --- /dev/null +++ b/java/examples/groups/H5Ex_G_Iterate.java @@ -0,0 +1,119 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to iterate over group members using + H5Gget_obj_info_all. 
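+ Note that H5Gget_obj_info_all is a convenience method provided
+ by the hdf.hdf5lib Java wrapper rather than a C library routine;
+ it returns the name, object type, and link type of every member
+ of the group in a single call.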
+ ************************************************************/ +package examples.groups; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +public class H5Ex_G_Iterate { + private static String FILENAME = "groups/h5ex_g_iterate.h5"; + private static String DATASETNAME = "/"; + + enum H5O_type { + H5O_TYPE_UNKNOWN(-1), // Unknown object type + H5O_TYPE_GROUP(0), // Object is a group + H5O_TYPE_DATASET(1), // Object is a dataset + H5O_TYPE_NAMED_DATATYPE(2), // Object is a named data type + H5O_TYPE_NTYPES(3); // Number of different object types + private static final Map<Integer, H5O_type> lookup = new HashMap<Integer, H5O_type>(); + + static { + for (H5O_type s : EnumSet.allOf(H5O_type.class)) + lookup.put(s.getCode(), s); + } + + private int code; + + H5O_type(int layout_type) { + this.code = layout_type; + } + + public int getCode() { + return this.code; + } + + public static H5O_type get(int code) { + return lookup.get(code); + } + } + + private static void do_iterate() { + long file_id = -1; + + // Open a file using default properties. + try { + file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Begin iteration. + System.out.println("Objects in root group:"); + try { + if (file_id >= 0) { + int count = (int) H5.H5Gn_members(file_id, DATASETNAME); + String[] oname = new String[count]; + int[] otype = new int[count]; + int[] ltype = new int[count]; + long[] orefs = new long[count]; + H5.H5Gget_obj_info_all(file_id, DATASETNAME, oname, otype, ltype, orefs, HDF5Constants.H5_INDEX_NAME); + + // Get type of the object and display its name and type. + for (int indx = 0; indx < otype.length; indx++) { + switch (H5O_type.get(otype[indx])) { + case H5O_TYPE_GROUP: + System.out.println(" Group: " + oname[indx]); + break; + case H5O_TYPE_DATASET: + System.out.println(" Dataset: " + oname[indx]); + break; + case H5O_TYPE_NAMED_DATATYPE: + System.out.println(" Datatype: " + oname[indx]); + break; + default: + System.out.println(" Unknown: " + oname[indx]); + } + } + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + H5Ex_G_Iterate.do_iterate(); + } + +} diff --git a/java/examples/groups/H5Ex_G_Phase.java b/java/examples/groups/H5Ex_G_Phase.java new file mode 100644 index 0000000..f23d6f2 --- /dev/null +++ b/java/examples/groups/H5Ex_G_Phase.java @@ -0,0 +1,241 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. 
*
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to set the conditions for
+ conversion between compact and dense (indexed) groups.
+ ************************************************************/
+package examples.groups;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+public class H5Ex_G_Phase {
+ private static String FILE = "H5Ex_G_Phase.h5";
+ private static int MAX_GROUPS = 7;
+ private static int MAX_COMPACT = 5;
+ private static int MIN_DENSE = 3;
+
+ enum H5G_storage {
+ H5G_STORAGE_TYPE_UNKNOWN(-1),
+ H5G_STORAGE_TYPE_SYMBOL_TABLE(0),
+ H5G_STORAGE_TYPE_COMPACT(1),
+ H5G_STORAGE_TYPE_DENSE(2);
+
+ private static final Map<Integer, H5G_storage> lookup = new HashMap<Integer, H5G_storage>();
+
+ static {
+ for (H5G_storage s : EnumSet.allOf(H5G_storage.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5G_storage(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5G_storage get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static void CreateGroup() {
+ long file_id = -1;
+ long group_id = -1;
+ long subgroup_id = -1;
+ long fapl_id = -1;
+ long gcpl_id = -1;
+ H5G_info_t ginfo;
+ String name = "G0"; // Name of the subgroup
+ int i;
+
+ // Set file access property list to allow the latest file format. This will allow the library to create new
+ // format groups.
+ try {
+ fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ if (fapl_id >= 0)
+ H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST, HDF5Constants.H5F_LIBVER_LATEST);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a group creation property list and set the phase change conditions.
+ try {
+ gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
+ if (gcpl_id >= 0)
+ H5.H5Pset_link_phase_change(gcpl_id, MAX_COMPACT, MIN_DENSE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a new file using the file access property list.
+ try {
+ if (fapl_id >= 0)
+ file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, fapl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create primary group.
+ try {
+ if ((file_id >= 0) && (gcpl_id >= 0))
+ group_id = H5.H5Gcreate(file_id, name, HDF5Constants.H5P_DEFAULT, gcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Add subgroups to "group" one at a time, printing the storage type for "group" after each subgroup is created.
+ for (i = 1; i <= MAX_GROUPS; i++) {
+ // Define the subgroup name and create the subgroup.
+ char append = (char) (((char) i) + '0');
+ name = name + append; /* G01, G012, G0123, etc. */
+ try {
+ if (group_id >= 0) {
+ subgroup_id = H5.H5Gcreate(group_id, name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ H5.H5Gclose(subgroup_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain the group info and print the group storage type
+ try {
+ if (group_id >= 0) {
+ ginfo = H5.H5Gget_info(group_id);
+ System.out.print(ginfo.nlinks + " Group" + (ginfo.nlinks == 1 ?
" " : "s") + ": Storage type is "); + switch (H5G_storage.get(ginfo.storage_type)) { + case H5G_STORAGE_TYPE_COMPACT: + System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format + break; + case H5G_STORAGE_TYPE_DENSE: + System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format + break; + case H5G_STORAGE_TYPE_SYMBOL_TABLE: + System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format + break; + case H5G_STORAGE_TYPE_UNKNOWN: + System.out.println("H5G_STORAGE_TYPE_UNKNOWN"); + break; + default: + System.out.println("Storage Type Invalid"); + break; + } + } + } + catch (Exception e) { + e.printStackTrace(); + } + } + + System.out.println(); + + // Delete subgroups one at a time, print the storage type for "group" after each subgroup is deleted. + for (i = MAX_GROUPS; i >= 1; i--) { + // Define the subgroup name and delete the subgroup. + try { + H5.H5Ldelete(group_id, name, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + name = name.substring(0, i + 1); + + // Obtain the group info and print the group storage type + try { + if (group_id >= 0) { + ginfo = H5.H5Gget_info(group_id); + System.out.print(ginfo.nlinks + " Group" + (ginfo.nlinks == 1 ? " " : "s") + ": Storage type is "); + switch (H5G_storage.get(ginfo.storage_type)) { + case H5G_STORAGE_TYPE_COMPACT: + System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format + break; + case H5G_STORAGE_TYPE_DENSE: + System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format + break; + case H5G_STORAGE_TYPE_SYMBOL_TABLE: + System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format + break; + case H5G_STORAGE_TYPE_UNKNOWN: + System.out.println("H5G_STORAGE_TYPE_UNKNOWN"); + break; + default: + System.out.println("Storage Type Invalid"); + break; + } + } + } + catch (Exception e) { + e.printStackTrace(); + } + } + + // Close and release resources + try { + if (fapl_id >= 0) + H5.H5Pclose(fapl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (gcpl_id >= 0) + H5.H5Pclose(gcpl_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the group + try { + if (group_id >= 0) + H5.H5Gclose(group_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5Ex_G_Phase.CreateGroup(); + } + +} diff --git a/java/examples/groups/H5Ex_G_Traverse.java b/java/examples/groups/H5Ex_G_Traverse.java new file mode 100644 index 0000000..b00fe97 --- /dev/null +++ b/java/examples/groups/H5Ex_G_Traverse.java @@ -0,0 +1,167 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. 
* + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ +This example shows a way to recursively traverse the file +using H5Literate. The method shown here guarantees that +the recursion will not enter an infinite loop, but does +not prevent objects from being visited more than once. +The program prints the directory structure of the file +specified in FILE. The default file used by this example +implements the structure described in the User's Guide, +chapter 4, figure 26. + ************************************************************/ +package examples.groups; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; +import hdf.hdf5lib.callbacks.H5L_iterate_cb; +import hdf.hdf5lib.callbacks.H5L_iterate_t; +import hdf.hdf5lib.structs.H5L_info_t; +import hdf.hdf5lib.structs.H5O_info_t; +import examples.groups.H5Ex_G_Iterate.H5O_type; + +class opdata implements H5L_iterate_t { + int recurs; + opdata prev; + long addr; +} + +public class H5Ex_G_Traverse { + + private static String FILE = "h5ex_g_traverse.h5"; + public static H5L_iterate_cb iter_cb = new H5L_iter_callbackT(); + + private static void OpenGroup() { + long file_id = -1; + H5O_info_t infobuf; + opdata od = new opdata(); + + // Open file and initialize the operator data structure. + try { + file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + if (file_id >= 0) { + infobuf = H5.H5Oget_info(file_id); + od.recurs = 0; + od.prev = null; + od.addr = infobuf.addr; + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Print the root group and formatting, begin iteration. + try { + System.out.println("/ {"); + // H5L_iterate_cb iter_cb = new H5L_iter_callbackT(); + H5.H5Literate(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, 0L, iter_cb, od); + System.out.println("}"); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close and release resources. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + H5Ex_G_Traverse.OpenGroup(); + } +} + +class H5L_iter_callbackT implements H5L_iterate_cb { + public int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data) { + + H5O_info_t infobuf; + int return_val = 0; + opdata od = (opdata) op_data; // Type conversion + int spaces = 2 * (od.recurs + 1); // Number of white spaces to prepend to output. + + // Get type of the object and display its name and type. + // The name of the object is passed to this function by the Library. + try { + infobuf = H5.H5Oget_info_by_name(group, name, HDF5Constants.H5P_DEFAULT); + + for (int i = 0; i < spaces; i++) + System.out.print(" "); // Format output. + switch (H5O_type.get(infobuf.type)) { + case H5O_TYPE_GROUP: + System.out.println("Group: " + name + " { "); + // Check group address against linked list of operator + // data structures. We will always run the check, as the + // reference count cannot be relied upon if there are + // symbolic links, and H5Oget_info_by_name always follows + // symbolic links. Alternatively we could use H5Lget_info + // and never recurse on groups discovered by symbolic + // links, however it could still fail if an object's + // reference count was manually manipulated with + // H5Odecr_refcount. 
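+ // od is the head of a linked list (via od.prev) holding the address
+ // of every group on the current recursion path; group_check() walks
+ // this list to detect a cycle.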
+ if (group_check(od, infobuf.addr)) { + for (int i = 0; i < spaces; i++) + System.out.print(" "); + System.out.println(" Warning: Loop detected!"); + } + else { + // Initialize new object of type opdata and begin + // recursive iteration on the discovered + // group. The new opdata is given a pointer to the + // current one. + opdata nextod = new opdata(); + nextod.recurs = od.recurs + 1; + nextod.prev = od; + nextod.addr = infobuf.addr; + H5L_iterate_cb iter_cb2 = new H5L_iter_callbackT(); + return_val = H5.H5Literate_by_name(group, name, HDF5Constants.H5_INDEX_NAME, + HDF5Constants.H5_ITER_NATIVE, 0L, iter_cb2, nextod, HDF5Constants.H5P_DEFAULT); + } + for (int i = 0; i < spaces; i++) + System.out.print(" "); + System.out.println("}"); + break; + case H5O_TYPE_DATASET: + System.out.println("Dataset: " + name); + break; + case H5O_TYPE_NAMED_DATATYPE: + System.out.println("Datatype: " + name); + break; + default: + System.out.println("Unknown: " + name); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + return return_val; + } + + public boolean group_check(opdata od, long target_addr) { + if (od.addr == target_addr) + return true; // Addresses match + else if (od.recurs == 0) + return false; // Root group reached with no matches + else + return group_check(od.prev, target_addr); // Recursively examine the next node + } + +} diff --git a/java/examples/groups/H5Ex_G_Visit.java b/java/examples/groups/H5Ex_G_Visit.java new file mode 100644 index 0000000..c2367a6 --- /dev/null +++ b/java/examples/groups/H5Ex_G_Visit.java @@ -0,0 +1,152 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + This example shows how to recursively traverse a file + using H5Ovisit and H5Lvisit. The program prints all of + the objects in the file specified in FILE, then prints all + of the links in that file. The default file used by this + example implements the structure described in the User's + Guide, chapter 4, figure 26. 
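+ Unlike H5Ex_G_Traverse, which drives the recursion itself with + H5Literate, H5Ovisit and H5Lvisit perform the recursive descent inside + the library, so the callbacks below only need to record and print each + object or link they are handed.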
+ ************************************************************/ +package examples.groups; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; +import hdf.hdf5lib.callbacks.H5L_iterate_cb; +import hdf.hdf5lib.callbacks.H5L_iterate_t; +import hdf.hdf5lib.callbacks.H5O_iterate_cb; +import hdf.hdf5lib.callbacks.H5O_iterate_t; +import hdf.hdf5lib.structs.H5L_info_t; +import hdf.hdf5lib.structs.H5O_info_t; + +import java.util.ArrayList; + +public class H5Ex_G_Visit { + + private static String FILE = "groups/h5ex_g_visit.h5"; + + public static void main(String[] args) { + try { + (new H5Ex_G_Visit()).VisitGroup(); + } + catch (Exception ex) { + ex.printStackTrace(); + } + } + + private void VisitGroup() throws Exception { + + long file_id = -1; + + try { + // Open file + file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); + + // Begin iteration using H5Ovisit + System.out.println("Objects in the file:"); + H5O_iterate_t iter_data = new H5O_iter_data(); + H5O_iterate_cb iter_cb = new H5O_iter_callback(); + H5.H5Ovisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb, iter_data); + System.out.println(); + // Repeat the same process using H5Lvisit + H5L_iterate_t iter_data2 = new H5L_iter_data(); + H5L_iterate_cb iter_cb2 = new H5L_iter_callback(); + System.out.println("Links in the file:"); + H5.H5Lvisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb2, iter_data2); + + } + catch (Exception e) { + e.printStackTrace(); + } + finally { + // Close and release resources. + if (file_id >= 0) + H5.H5Fclose(file_id); + } + } + + /************************************************************ + * Operator function for H5Lvisit. This function simply retrieves the info for the object the current link points + * to, and calls the operator function for H5Ovisit. + ************************************************************/ + + private class idata { + public String link_name = null; + public int link_type = -1; + + idata(String name, int type) { + this.link_name = name; + this.link_type = type; + } + } + + private class H5L_iter_data implements H5L_iterate_t { + public ArrayList<idata> iterdata = new ArrayList<idata>(); + } + + private class H5L_iter_callback implements H5L_iterate_cb { + public int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data) { + + idata id = new idata(name, info.type); + ((H5L_iter_data) op_data).iterdata.add(id); + + H5O_info_t infobuf; + int ret = 0; + try { + // Get type of the object and display its name and type. The name of the object is passed to this + // function by the Library. + infobuf = H5.H5Oget_info_by_name(group, name, HDF5Constants.H5P_DEFAULT); + H5O_iterate_cb iter_cbO = new H5O_iter_callback(); + H5O_iterate_t iter_dataO = new H5O_iter_data(); + ret = iter_cbO.callback(group, name, infobuf, iter_dataO); + } + catch (Exception e) { + e.printStackTrace(); + } + + return ret; + } + } + + private class H5O_iter_data implements H5O_iterate_t { + public ArrayList<idata> iterdata = new ArrayList<idata>(); + } + + private class H5O_iter_callback implements H5O_iterate_cb { + public int callback(long group, String name, H5O_info_t info, H5O_iterate_t op_data) { + idata id = new idata(name, info.type); + ((H5O_iter_data) op_data).iterdata.add(id); + + System.out.print("/"); /* Print root group in object path */ + + // Check if the current object is the root group, and if not print the full path name and type. 
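+ // H5Ovisit reports the object at which iteration started under the name + // ".", so a leading '.' identifies the root group here.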
+ + if (name.charAt(0) == '.') /* Root group, do not print '.' */ + System.out.println(" (Group)"); + else if (info.type == HDF5Constants.H5O_TYPE_GROUP) + System.out.println(name + " (Group)"); + else if (info.type == HDF5Constants.H5O_TYPE_DATASET) + System.out.println(name + " (Dataset)"); + else if (info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE) + System.out.println(name + " (Datatype)"); + else + System.out.println(name + " (Unknown)"); + + return 0; + } + } + +} diff --git a/java/examples/groups/Makefile.am b/java/examples/groups/Makefile.am new file mode 100644 index 0000000..802e2b0 --- /dev/null +++ b/java/examples/groups/Makefile.am @@ -0,0 +1,68 @@ +# +# Copyright by The HDF Group. +# Copyright by the Board of Trustees of the University of Illinois. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the files COPYING and Copyright.html. COPYING can be found at the root +# of the source code distribution tree; Copyright.html can be found at the +# root level of an installed copy of the electronic HDF5 document set and +# is linked from the top-level documents page. It can also be found at +# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have +# access to either file, you may request a copy from help@hdfgroup.org. +## +## Makefile.am +## Run automake to generate a Makefile.in from this file. +## +# +# HDF5 Java Library Examples Makefile(.in) + +include $(top_srcdir)/config/commence.am + +# Mark this directory as part of the JNI API +JAVA_API=yes + +JAVAROOT = .classes + +classes: + $(MKDIR_P) $(@D)/$(JAVAROOT) + +pkgpath = examples/groups +hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar +CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.5.jar:$$CLASSPATH + +jarfile = jar$(PACKAGE_TARNAME)groups.jar + +AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation + +TESTPACKAGE = + +noinst_JAVA = \ + H5Ex_G_Create.java \ + H5Ex_G_Iterate.java \ + H5Ex_G_Compact.java \ + H5Ex_G_Corder.java \ + H5Ex_G_Intermediate.java \ + H5Ex_G_Phase.java \ + H5Ex_G_Visit.java + + +$(jarfile): classnoinst.stamp classes + $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath) + +noinst_DATA = $(jarfile) + +.PHONY: classes + +check_SCRIPTS = runExample.sh +TEST_SCRIPT = $(check_SCRIPTS) + +CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class runExample.sh + +clean: + rm -rf $(JAVAROOT) + rm -f $(jarfile) + rm -f classnoinst.stamp + +include $(top_srcdir)/config/conclude.am diff --git a/java/examples/groups/h5ex_g_iterate.h5 b/java/examples/groups/h5ex_g_iterate.h5 Binary files differnew file mode 100644 index 0000000..e462703 --- /dev/null +++ b/java/examples/groups/h5ex_g_iterate.h5 diff --git a/java/examples/groups/h5ex_g_visit.h5 b/java/examples/groups/h5ex_g_visit.h5 Binary files differnew file mode 100644 index 0000000..d8267b1 --- /dev/null +++ b/java/examples/groups/h5ex_g_visit.h5 diff --git a/java/examples/groups/runExample.sh.in b/java/examples/groups/runExample.sh.in new file mode 100644 index 0000000..a59a894 --- /dev/null +++ b/java/examples/groups/runExample.sh.in @@ -0,0 +1,349 @@ +#! /bin/sh +# +# Copyright by The HDF Group. +# Copyright by the Board of Trustees of the University of Illinois. +# All rights reserved. +# +# This file is part of HDF5. 
The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the files COPYING and Copyright.html. COPYING can be found at the root +# of the source code distribution tree; Copyright.html can be found at the +# root level of an installed copy of the electronic HDF5 document set and +# is linked from the top-level documents page. It can also be found at +# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have +# access to either file, you may request a copy from help@hdfgroup.org. +# + +top_builddir=@top_builddir@ +top_srcdir=@top_srcdir@ +srcdir=@srcdir@ + +TESTNAME=EX_Groups +EXIT_SUCCESS=0 +EXIT_FAILURE=1 + +# Set up default variable values if not supplied by the user. +RM='rm -rf' +CMP='cmp' +DIFF='diff -c' +CP='cp' +DIRNAME='dirname' +LS='ls' +AWK='awk' + +nerrors=0 + +# where the libs exist +HDFLIB_HOME="$top_srcdir/java/lib" +BLDLIBDIR="./lib" +BLDREFDIR="./groups" +BLDDIR="." +HDFTEST_HOME="$top_srcdir/java/examples/groups" +JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar +TESTJARFILE=jar@PACKAGE_TARNAME@groups.jar +test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR +test -d $BLDREFDIR || mkdir -p $BLDREFDIR + +###################################################################### +# library files +# -------------------------------------------------------------------- +# All the library files copy from source directory to test directory +# NOTE: Keep this framework to add/remove test files. +# This list are also used for checking exist. +# Comment '#' without space can be used. +# -------------------------------------------------------------------- +LIST_LIBRARY_FILES=" +$HDFLIB_HOME/slf4j-api-1.7.5.jar +$HDFLIB_HOME/ext/slf4j-simple-1.7.5.jar +$top_builddir/src/.libs/libhdf5.* +$top_builddir/java/src/jni/.libs/libhdf5_java.* +$top_builddir/java/src/$JARFILE +" +LIST_REF_FILES=" +$HDFTEST_HOME/h5ex_g_iterate.h5 +$HDFTEST_HOME/h5ex_g_visit.h5 +" +LIST_DATA_FILES=" +$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Create.txt +$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Iterate.txt +$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Compact.txt +$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Corder.txt +$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Intermediate.txt +$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Phase.txt +$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Visit.txt +" + +# +# copy files from source dirs to test dir +# +COPY_LIBFILES="$LIST_LIBRARY_FILES" + +COPY_LIBFILES_TO_BLDLIBDIR() +{ + # copy test files. Used -f to make sure get a new copy + for tstfile in $COPY_LIBFILES + do + # ignore '#' comment + echo $tstfile | tr -d ' ' | grep '^#' > /dev/null + RET=$? + if [ $RET -eq 1 ]; then + # skip cp if srcdir is same as destdir + # this occurs when build/test performed in source dir and + # make cp fail + SDIR=`$DIRNAME $tstfile` + INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` + INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'` + if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then + $CP -f $tstfile $BLDLIBDIR + if [ $? -ne 0 ]; then + echo "Error: FAILED to copy $tstfile ." 
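+ # Without the library files in place none of the example runs below + # can succeed, so the whole script aborts here.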
+ + # Comment out this to CREATE expected file + exit $EXIT_FAILURE + fi + fi + fi + done +} + +CLEAN_LIBFILES_AND_BLDLIBDIR() +{ + # skip rm if srcdir is same as destdir + # this occurs when build/test performed in source dir and + # make cp fail + SDIR=`$DIRNAME $tstfile` + INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` + INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'` + if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then + $RM $BLDLIBDIR + fi +} + +COPY_DATAFILES="$LIST_DATA_FILES" + +COPY_DATAFILES_TO_BLDDIR() +{ + # copy test files. Used -f to make sure get a new copy + for tstfile in $COPY_DATAFILES + do + # ignore '#' comment + echo $tstfile | tr -d ' ' | grep '^#' > /dev/null + RET=$? + if [ $RET -eq 1 ]; then + # skip cp if srcdir is same as destdir + # this occurs when build/test performed in source dir and + # make cp fail + SDIR=`$DIRNAME $tstfile` + INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` + INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'` + if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then + $CP -f $tstfile $BLDDIR + if [ $? -ne 0 ]; then + echo "Error: FAILED to copy $tstfile ." + + # Comment out this to CREATE expected file + exit $EXIT_FAILURE + fi + fi + fi + done +} + +CLEAN_DATAFILES_AND_BLDDIR() +{ + # skip rm if srcdir is same as destdir + # this occurs when build/test performed in source dir and + # make cp fail + SDIR=`$DIRNAME $tstfile` + INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` + INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'` + if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then + $RM $BLDDIR/*.txt + $RM $BLDDIR/*.out + fi +} + +COPY_REFFILES="$LIST_REF_FILES" + +COPY_REFFILES_TO_BLDREFDIR() +{ + # copy test files. Used -f to make sure get a new copy + for tstfile in $COPY_REFFILES + do + # ignore '#' comment + echo $tstfile | tr -d ' ' | grep '^#' > /dev/null + RET=$? + if [ $RET -eq 1 ]; then + # skip cp if srcdir is same as destdir + # this occurs when build/test performed in source dir and + # make cp fail + SDIR=`$DIRNAME $tstfile` + INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` + INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'` + if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then + $CP -f $tstfile $BLDREFDIR + if [ $? -ne 0 ]; then + echo "Error: FAILED to copy $tstfile ." + + # Comment out this to CREATE expected file + exit $EXIT_FAILURE + fi + fi + fi + done +} + +CLEAN_REFFILES_AND_BLDREFDIR() +{ + # skip rm if srcdir is same as destdir + # this occurs when build/test performed in source dir and + # make cp fail + SDIR=`$DIRNAME $tstfile` + INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` + INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'` + if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then + $RM $BLDREFDIR + fi +} + +# Print a line-line message left justified in a field of 70 characters +# beginning with the word "Testing". +# +TESTING() { + SPACES=" " + echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012' +} + +# where Java is installed (requires jdk1.7.x) +JAVAEXE=@JAVA@ +JAVAEXEFLAGS=@H5_JAVAFLAGS@ + +############################################################################### +# DO NOT MODIFY BELOW THIS LINE +############################################################################### + +# prepare for test +COPY_LIBFILES_TO_BLDLIBDIR +COPY_DATAFILES_TO_BLDDIR +COPY_REFFILES_TO_BLDREFDIR + +CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-1.7.5.jar:"$BLDLIBDIR"/slf4j-simple-1.7.5.jar:"$TESTJARFILE"" + +TEST=/usr/bin/test +if [ ! 
-x /usr/bin/test ] +then +TEST=`which test` +fi + +if $TEST -z "$CLASSPATH"; then + CLASSPATH="" +fi +CLASSPATH=$CPATH":"$CLASSPATH +export CLASSPATH + +if $TEST -n "$JAVAPATH" ; then + PATH=$JAVAPATH":"$PATH + export PATH +fi + +if $TEST -e /bin/uname; then + os_name=`/bin/uname -s` +elif $TEST -e /usr/bin/uname; then + os_name=`/usr/bin/uname -s` +else + os_name=unknown +fi + +if $TEST -z "$LD_LIBRARY_PATH" ; then + LD_LIBRARY_PATH="" +fi + +case $os_name in + Darwin) + DYLD_LIBRARY_PATH=$BLDLIBDIR:$DYLD_LIBRARY_PATH + export DYLD_LIBRARY_PATH + LD_LIBRARY_PATH=$DYLD_LIBRARY_PATH + ;; + *) + LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH + ;; +esac + +export LD_LIBRARY_PATH + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Create" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Create > H5Ex_G_Create.out) +if diff H5Ex_G_Create.out examples.groups.H5Ex_G_Create.txt > /dev/null; then + echo " PASSED groups.H5Ex_G_Create" +else + echo "**FAILED** groups.H5Ex_G_Create" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Iterate" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Iterate > H5Ex_G_Iterate.out) +if diff H5Ex_G_Iterate.out examples.groups.H5Ex_G_Iterate.txt > /dev/null; then + echo " PASSED groups.H5Ex_G_Iterate" +else + echo "**FAILED** groups.H5Ex_G_Iterate" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Compact" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Compact > H5Ex_G_Compact.out) +if diff H5Ex_G_Compact.out examples.groups.H5Ex_G_Compact.txt > /dev/null; then + echo " PASSED groups.H5Ex_G_Compact" +else + echo "**FAILED** groups.H5Ex_G_Compact" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Corder" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Corder > H5Ex_G_Corder.out) +if diff H5Ex_G_Corder.out examples.groups.H5Ex_G_Corder.txt > /dev/null; then + echo " PASSED groups.H5Ex_G_Corder" +else + echo "**FAILED** groups.H5Ex_G_Corder" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Intermediate" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Intermediate > H5Ex_G_Intermediate.out) +if diff H5Ex_G_Intermediate.out examples.groups.H5Ex_G_Intermediate.txt > /dev/null; then + echo " PASSED groups.H5Ex_G_Intermediate" +else + echo "**FAILED** groups.H5Ex_G_Intermediate" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH 
examples.groups.H5Ex_G_Phase" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Phase > H5Ex_G_Phase.out) +if diff H5Ex_G_Phase.out examples.groups.H5Ex_G_Phase.txt > /dev/null; then + echo " PASSED groups.H5Ex_G_Phase" +else + echo "**FAILED** groups.H5Ex_G_Phase" + nerrors="`expr $nerrors + 1`" +fi + +echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Visit" +($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Visit > H5Ex_G_Visit.out) +if diff H5Ex_G_Visit.out examples.groups.H5Ex_G_Visit.txt > /dev/null; then + echo " PASSED groups.H5Ex_G_Visit" +else + echo "**FAILED** groups.H5Ex_G_Visit" + nerrors="`expr $nerrors + 1`" +fi + +# Clean up temporary files/directories +CLEAN_LIBFILES_AND_BLDLIBDIR +CLEAN_DATAFILES_AND_BLDDIR +CLEAN_REFFILES_AND_BLDREFDIR + +# Report test results and exit +if test $nerrors -eq 0 ; then + echo "All $TESTNAME tests passed." + exit $EXIT_SUCCESS +else + echo "$TESTNAME tests failed with $nerrors errors." + exit $EXIT_FAILURE +fi diff --git a/java/examples/intro/CMakeLists.txt b/java/examples/intro/CMakeLists.txt new file mode 100644 index 0000000..ae3f585 --- /dev/null +++ b/java/examples/intro/CMakeLists.txt @@ -0,0 +1,112 @@ +cmake_minimum_required (VERSION 3.1.0) +PROJECT (HDFJAVA_EXAMPLES_INTRO Java) + +set (CMAKE_VERBOSE_MAKEFILE 1) + +INCLUDE_DIRECTORIES ( + ${HDF5_JAVA_JNI_BINARY_DIR} + ${HDF5_JAVA_HDF5_LIB_DIR} +) + +set (HDF_JAVA_EXAMPLES + H5_CreateAttribute + H5_CreateDataset + H5_CreateFile + H5_CreateGroup + H5_CreateGroupAbsoluteRelative + H5_CreateGroupDataset + H5_ReadWrite +) + +if (WIN32) + set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";") +else (WIN32) + set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":") +endif (WIN32) + +set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS}") + +set (CMAKE_JAVA_CLASSPATH ".") +foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH}) + set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}") +endforeach (CMAKE_INCLUDE_PATH) + +foreach (example ${HDF_JAVA_EXAMPLES}) + file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt + "Main-Class: examples.intro.${example} +" + ) + add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java) + get_target_property (${example}_JAR_FILE ${example} JAR_FILE) +# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples) + get_target_property (${example}_CLASSPATH ${example} CLASSDIR) + add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET}) +endforeach (example ${HDF_JAVA_EXAMPLES}) + +set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}") + +set (CMAKE_JAVA_CLASSPATH ".") +foreach (HDFJAVA_JAR ${CMAKE_JAVA_INCLUDE_PATH}) + set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${HDFJAVA_JAR}") +endforeach (HDFJAVA_JAR) + +foreach (example ${HDF_JAVA_OBJECT_EXAMPLES}) + file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt + "Main-Class: examples.intro.${example} +" + ) + add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java) + get_target_property (${example}_JAR_FILE ${example} JAR_FILE) +# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples) + get_target_property (${example}_CLASSPATH ${example} 
CLASSDIR) + add_dependencies (${example} ${HDFJAVA_H5_LIB_TARGET}) +endforeach (example ${HDF_JAVA_OBJECT_EXAMPLES}) + +MACRO (ADD_H5_TEST resultfile resultcode) + add_test ( + NAME JAVA_intro-${resultfile} + COMMAND "${CMAKE_COMMAND}" + -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}" + -D "TEST_PROGRAM=examples.intro.${resultfile}" + -D "TEST_ARGS:STRING=${ARGN}" + -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${resultfile}_JAR_FILE}" + -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}" + -D "TEST_FOLDER=${HDFJAVA_EXAMPLES_BINARY_DIR}" + -D "TEST_OUTPUT=intro/${resultfile}.out" + -D "TEST_EXPECT=${resultcode}" + -D "TEST_REFERENCE=intro/${resultfile}.txt" + -P "${HDF_RESOURCES_DIR}/jrunTest.cmake" + ) + if (NOT "${last_test}" STREQUAL "") + set_tests_properties (JAVA_intro-${resultfile} PROPERTIES DEPENDS ${last_test}) + endif (NOT "${last_test}" STREQUAL "") + set (last_test "JAVA_intro-${resultfile}") +ENDMACRO (ADD_H5_TEST file) + +if (BUILD_TESTING) + + foreach (example ${HDF_JAVA_EXAMPLES}) + add_test ( + NAME JAVA_intro-${example}-clearall-objects + COMMAND ${CMAKE_COMMAND} + -E remove + ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 + ${example}.out + ${example}.out.err + ) + if (NOT "${last_test}" STREQUAL "") + set_tests_properties (JAVA_intro-${example}-clearall-objects PROPERTIES DEPENDS ${last_test}) + endif (NOT "${last_test}" STREQUAL "") + add_test ( + NAME JAVA_intro-${example}-copy-objects + COMMAND ${CMAKE_COMMAND} + -E copy_if_different + ${HDFJAVA_EXAMPLES_SOURCE_DIR}/testfiles/examples.intro.${example}.txt + ${HDFJAVA_EXAMPLES_INTRO_BINARY_DIR}/${example}.txt + ) + set_tests_properties (JAVA_intro-${example}-copy-objects PROPERTIES DEPENDS JAVA_intro-${example}-clearall-objects) + set (last_test "JAVA_intro-${example}-copy-objects") + ADD_H5_TEST (${example} 0) + endforeach (example ${HDF_JAVA_EXAMPLES}) + +endif (BUILD_TESTING) diff --git a/java/examples/intro/H5_CreateAttribute.java b/java/examples/intro/H5_CreateAttribute.java new file mode 100644 index 0000000..16c53d6 --- /dev/null +++ b/java/examples/intro/H5_CreateAttribute.java @@ -0,0 +1,145 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + Creating a dataset attribute. 
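+ The example creates a 4 x 6 integer dataset, attaches a two-element + integer attribute named "Units" to it, and writes the values 100 and + 200 to that attribute.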
+ ************************************************************/ + +package examples.intro; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5_CreateAttribute { + private static String FILENAME = "H5_CreateAttribute.h5"; + private static String DATASETNAME = "dset"; + private static final int DIM_X = 4; + private static final int DIM_Y = 6; + private static String DATASETATTRIBUTE = "Units"; + + private static void CreateDatasetAttribute() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long attribute_id = -1; + long[] dims1 = { DIM_X, DIM_Y }; + long[] dims = { 2 }; + int[] attr_data = { 100, 200 }; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the data space for the dataset. + try { + dataspace_id = H5.H5Screate_simple(2, dims1, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset. + try { + if ((file_id >= 0) && (dataspace_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the data space for the attribute. + try { + dataspace_id = H5.H5Screate_simple(1, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create a dataset attribute. + try { + if ((dataset_id >= 0) && (dataspace_id >= 0)) + attribute_id = H5.H5Acreate(dataset_id, DATASETATTRIBUTE, HDF5Constants.H5T_STD_I32BE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the attribute data. + try { + if (attribute_id >= 0) + H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_INT, attr_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the attribute. + try { + if (attribute_id >= 0) + H5.H5Aclose(attribute_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the dataspace. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the dataset. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + H5_CreateAttribute.CreateDatasetAttribute(); + } + +} diff --git a/java/examples/intro/H5_CreateDataset.java b/java/examples/intro/H5_CreateDataset.java new file mode 100644 index 0000000..a16cfe6 --- /dev/null +++ b/java/examples/intro/H5_CreateDataset.java @@ -0,0 +1,97 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. 
COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + Creating and closing a dataset. + ************************************************************/ + +package examples.intro; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5_CreateDataset { + private static String FILENAME = "H5_CreateDataset.h5"; + private static String DATASETNAME = "dset"; + private static final int DIM_X = 4; + private static final int DIM_Y = 6; + + private static void CreateDataset() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM_X, DIM_Y }; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the data space for the dataset. + try { + dataspace_id = H5.H5Screate_simple(2, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset. + try { + if ((file_id >= 0) && (dataspace_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // End access to the dataset and release resources used by it. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Terminate access to the data space. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5_CreateDataset.CreateDataset(); + } + +} diff --git a/java/examples/intro/H5_CreateFile.java b/java/examples/intro/H5_CreateFile.java new file mode 100644 index 0000000..eb9f277 --- /dev/null +++ b/java/examples/intro/H5_CreateFile.java @@ -0,0 +1,55 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. 
* + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + Creating and closing a file. + ************************************************************/ + +package examples.intro; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5_CreateFile { + static final String FILENAME = "H5_CreateFile.h5"; + + private static void CreateFile() { + long file_id = -1; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5_CreateFile.CreateFile(); + } + +} diff --git a/java/examples/intro/H5_CreateGroup.java b/java/examples/intro/H5_CreateGroup.java new file mode 100644 index 0000000..36bd49a --- /dev/null +++ b/java/examples/intro/H5_CreateGroup.java @@ -0,0 +1,76 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + Creating and closing a group. + ************************************************************/ + +package examples.intro; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5_CreateGroup { + private static String FILENAME = "H5_CreateGroup.h5"; + private static String GROUPNAME = "MyGroup"; + + private static void CreateGroup() { + long file_id = -1; + long group_id = -1; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create a group in the file. + try { + if (file_id >= 0) + group_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the group. + try { + if (group_id >= 0) + H5.H5Gclose(group_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. 
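+ // H5Fclose flushes any buffered data for the file to disk; the group is + // closed first so that no identifiers remain open against the file.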
+ try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5_CreateGroup.CreateGroup(); + } + +} diff --git a/java/examples/intro/H5_CreateGroupAbsoluteRelative.java b/java/examples/intro/H5_CreateGroupAbsoluteRelative.java new file mode 100644 index 0000000..e0127cc --- /dev/null +++ b/java/examples/intro/H5_CreateGroupAbsoluteRelative.java @@ -0,0 +1,118 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + Creating groups using absolute and relative names. + ************************************************************/ + +package examples.intro; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5_CreateGroupAbsoluteRelative { + private static String FILENAME = "H5_CreateGroupAbsoluteRelative.h5"; + private static String GROUPNAME = "MyGroup"; + private static String GROUPNAME_A = "GroupA"; + private static String GROUPNAME_B = "GroupB"; + + private static void CreateGroupAbsoluteAndRelative() { + long file_id = -1; + long group1_id = -1; + long group2_id = -1; + long group3_id = -1; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create a group named "/MyGroup" in the file. + try { + if (file_id >= 0) + group1_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create group "Group_A" in group "MyGroup" using absolute name. + try { + if (file_id >= 0) + group2_id = H5.H5Gcreate(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create group "Group_B" in group "MyGroup" using relative name. + try { + if (group1_id >= 0) + group3_id = H5.H5Gcreate(group1_id, GROUPNAME_B, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the group3. + try { + if (group3_id >= 0) + H5.H5Gclose(group3_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the group2. + try { + if (group2_id >= 0) + H5.H5Gclose(group2_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the group1. 
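+ // Release the identifiers in reverse order of creation; each close is + // attempted separately so a failure on one does not prevent the others.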
+ try { + if (group1_id >= 0) + H5.H5Gclose(group1_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + } + + public static void main(String[] args) { + H5_CreateGroupAbsoluteRelative.CreateGroupAbsoluteAndRelative(); + } + +} diff --git a/java/examples/intro/H5_CreateGroupDataset.java b/java/examples/intro/H5_CreateGroupDataset.java new file mode 100644 index 0000000..0607bbd --- /dev/null +++ b/java/examples/intro/H5_CreateGroupDataset.java @@ -0,0 +1,207 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + Create two datasets within groups. + ************************************************************/ + +package examples.intro; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5_CreateGroupDataset { + private static String FILENAME = "H5_CreateGroupDataset.h5"; + private static String GROUPNAME = "MyGroup"; + private static String GROUPNAME_A = "GroupA"; + private static String DATASETNAME1 = "dset1"; + private static String DATASETNAME2 = "dset2"; + private static final int DIM1_X = 3; + private static final int DIM1_Y = 3; + private static final int DIM2_X = 2; + private static final int DIM2_Y = 10; + + private static void h5_crtgrpd() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long group_id = -1; + long group1_id = -1; + long group2_id = -1; + int[][] dset1_data = new int[DIM1_X][DIM1_Y]; + int[][] dset2_data = new int[DIM2_X][DIM2_Y]; + long[] dims1 = { DIM1_X, DIM1_Y }; + long[] dims2 = { DIM2_X, DIM2_Y }; + + // Initialize the first dataset. + for (int indx = 0; indx < DIM1_X; indx++) + for (int jndx = 0; jndx < DIM1_Y; jndx++) + dset1_data[indx][jndx] = jndx + 1; + + // Initialize the second dataset. + for (int indx = 0; indx < DIM2_X; indx++) + for (int jndx = 0; jndx < DIM2_Y; jndx++) + dset2_data[indx][jndx] = jndx + 1; + + // Create a file. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + // Create a group named "/MyGroup" in the file. + if (file_id >= 0) { + group1_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + // Create group "Group_A" in group "MyGroup" using absolute name. 
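+ // The absolute name "/MyGroup/GroupA" is resolved from the file's root + // group, so file_id is passed as the location even though the new group + // is nested inside /MyGroup.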
+ if (group1_id >= 0) { + group2_id = H5.H5Gcreate(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + if (group2_id >= 0) + H5.H5Gclose(group2_id); + } + if (group1_id >= 0) + H5.H5Gclose(group1_id); + } + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the data space for the first dataset. + try { + dataspace_id = H5.H5Screate_simple(2, dims1, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset in group "MyGroup". + try { + if ((file_id >= 0) && (dataspace_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, "/" + GROUPNAME + "/" + DATASETNAME1, HDF5Constants.H5T_STD_I32BE, + dataspace_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the first dataset. + try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset1_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the data space for the first dataset. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + dataspace_id = -1; + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the first dataset. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + dataset_id = -1; + } + catch (Exception e) { + e.printStackTrace(); + } + + // Open an existing group of the specified file. + try { + if (file_id >= 0) + group_id = H5.H5Gopen(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the data space for the second dataset. + try { + dataspace_id = H5.H5Screate_simple(2, dims2, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the second dataset in group "Group_A". + try { + if ((group_id >= 0) && (dataspace_id >= 0)) + dataset_id = H5.H5Dcreate(group_id, DATASETNAME2, HDF5Constants.H5T_STD_I32BE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the second dataset. + try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset2_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the data space for the second dataset. + try { + if (dataspace_id >= 0) + H5.H5Sclose(dataspace_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the second dataset. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the group. + try { + if (group_id >= 0) + H5.H5Gclose(group_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + H5_CreateGroupDataset.h5_crtgrpd(); + } + +} diff --git a/java/examples/intro/H5_ReadWrite.java b/java/examples/intro/H5_ReadWrite.java new file mode 100644 index 0000000..0d73884 --- /dev/null +++ b/java/examples/intro/H5_ReadWrite.java @@ -0,0 +1,112 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. 
* + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/************************************************************ + Writing and reading an existing dataset. + ************************************************************/ + +package examples.intro; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; + +public class H5_ReadWrite { + private static String FILENAME = "H5_ReadWrite.h5"; + private static String DATASETNAME = "dset"; + private static final int DIM_X = 4; + private static final int DIM_Y = 6; + + private static void ReadWriteDataset() { + long file_id = -1; + long dataspace_id = -1; + long dataset_id = -1; + long[] dims = { DIM_X, DIM_Y }; + int[][] dset_data = new int[DIM_X][DIM_Y]; + + // Initialize the dataset. + for (int indx = 0; indx < DIM_X; indx++) + for (int jndx = 0; jndx < DIM_Y; jndx++) + dset_data[indx][jndx] = indx * 6 + jndx + 1; + + // Create a new file using default properties. + try { + file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the data space for the dataset. + try { + dataspace_id = H5.H5Screate_simple(2, dims, null); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Create the dataset. + try { + if ((file_id >= 0) && (dataspace_id >= 0)) + dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE, dataspace_id, + HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Write the dataset. + try { + if (dataset_id >= 0) + H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + try { + if (dataset_id >= 0) + H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset_data); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the dataset. + try { + if (dataset_id >= 0) + H5.H5Dclose(dataset_id); + } + catch (Exception e) { + e.printStackTrace(); + } + + // Close the file. + try { + if (file_id >= 0) + H5.H5Fclose(file_id); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public static void main(String[] args) { + H5_ReadWrite.ReadWriteDataset(); + } + +} diff --git a/java/examples/intro/Makefile.am b/java/examples/intro/Makefile.am new file mode 100644 index 0000000..fef33d2 --- /dev/null +++ b/java/examples/intro/Makefile.am @@ -0,0 +1,67 @@ +# +# Copyright by The HDF Group. +# Copyright by the Board of Trustees of the University of Illinois. +# All rights reserved. +# +# This file is part of HDF5. 
The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the files COPYING and Copyright.html. COPYING can be found at the root +# of the source code distribution tree; Copyright.html can be found at the +# root level of an installed copy of the electronic HDF5 document set and +# is linked from the top-level documents page. It can also be found at +# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have +# access to either file, you may request a copy from help@hdfgroup.org. +## +## Makefile.am +## Run automake to generate a Makefile.in from this file. +## +# +# HDF5 Java Library Examples Makefile(.in) + +include $(top_srcdir)/config/commence.am + +# Mark this directory as part of the JNI API +JAVA_API=yes + +JAVAROOT = .classes + +classes: + $(MKDIR_P) $(@D)/$(JAVAROOT) + +pkgpath = examples/intro +hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar +CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.5.jar:$$CLASSPATH + +jarfile = jar$(PACKAGE_TARNAME)intro.jar + +AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation + +TESTPACKAGE = + +noinst_JAVA = \ + H5_CreateAttribute.java \ + H5_CreateDataset.java \ + H5_CreateFile.java \ + H5_CreateGroup.java \ + H5_CreateGroupAbsoluteRelative.java \ + H5_CreateGroupDataset.java \ + H5_ReadWrite.java + +$(jarfile): classnoinst.stamp classes + $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath) + +noinst_DATA = $(jarfile) + +.PHONY: classes + +check_SCRIPTS = runExample.sh +TEST_SCRIPT = $(check_SCRIPTS) + +CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class runExample.sh + +clean: + rm -rf $(JAVAROOT) + rm -f $(jarfile) + rm -f classnoinst.stamp + +include $(top_srcdir)/config/conclude.am diff --git a/java/examples/intro/runExample.sh.in b/java/examples/intro/runExample.sh.in new file mode 100644 index 0000000..ecc51ca --- /dev/null +++ b/java/examples/intro/runExample.sh.in @@ -0,0 +1,298 @@ +#! /bin/sh +# +# Copyright by The HDF Group. +# Copyright by the Board of Trustees of the University of Illinois. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the files COPYING and Copyright.html. COPYING can be found at the root +# of the source code distribution tree; Copyright.html can be found at the +# root level of an installed copy of the electronic HDF5 document set and +# is linked from the top-level documents page. It can also be found at +# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have +# access to either file, you may request a copy from help@hdfgroup.org. +# + +top_builddir=@top_builddir@ +top_srcdir=@top_srcdir@ +srcdir=@srcdir@ + +TESTNAME=EX_Intro +EXIT_SUCCESS=0 +EXIT_FAILURE=1 + +# Set up default variable values if not supplied by the user. +RM='rm -rf' +CMP='cmp' +DIFF='diff -c' +CP='cp' +DIRNAME='dirname' +LS='ls' +AWK='awk' + +nerrors=0 + +# where the libs exist +HDFLIB_HOME="$top_srcdir/java/lib" +BLDLIBDIR="./lib" +BLDDIR="." 
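+# BLDDIR is the working directory: expected-output (.txt) files are staged +# here and each example writes its .out file here for comparison.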
+HDFTEST_HOME="$top_srcdir/java/examples/intro"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@intro.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# All the library files are copied from the source directory to the
+# test directory.
+# NOTE: Keep this framework when adding/removing test files.
+#       The lists are also used to check that the files exist.
+#       Entries starting with '#' (no leading space) are treated as
+#       comments and skipped.
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$HDFLIB_HOME/slf4j-api-1.7.5.jar
+$HDFLIB_HOME/ext/slf4j-simple-1.7.5.jar
+$top_builddir/src/.libs/libhdf5.*
+$top_builddir/java/src/jni/.libs/libhdf5_java.*
+$top_builddir/java/src/$JARFILE
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateDataset.txt
+$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateFile.txt
+$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateGroup.txt
+$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt
+$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateGroupDataset.txt
+$HDFTEST_HOME/../testfiles/examples.intro.H5_ReadWrite.txt
+"
+
+#
+# copy files from the source directories to the test directory
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+    # Copy the library files; -f makes sure we get a fresh copy each time.
+    for tstfile in $COPY_LIBFILES
+    do
+        # skip '#' comment entries
+        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+        RET=$?
+        if [ $RET -eq 1 ]; then
+            # Skip the cp when srcdir is the same directory as destdir;
+            # this occurs when the build/test is performed in the source
+            # directory and would make cp fail. Compare inodes to detect it.
+            SDIR=`$DIRNAME $tstfile`
+            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+            INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+                $CP -f $tstfile $BLDLIBDIR
+                if [ $? -ne 0 ]; then
+                    echo "Error: FAILED to copy $tstfile."
+
+                    # Comment out this exit to CREATE the expected file.
+                    exit $EXIT_FAILURE
+                fi
+            fi
+        fi
+    done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+    # Skip the rm when srcdir is the same directory as destdir;
+    # $tstfile still holds the last file handled by the copy loops.
+    SDIR=`$DIRNAME $tstfile`
+    INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+    INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+    if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+        $RM $BLDLIBDIR
+    fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+    # Copy the data files; -f makes sure we get a fresh copy each time.
+    for tstfile in $COPY_DATAFILES
+    do
+        # skip '#' comment entries
+        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+        RET=$?
+        if [ $RET -eq 1 ]; then
+            # Skip the cp when srcdir is the same directory as destdir;
+            # this occurs when the build/test is performed in the source
+            # directory and would make cp fail. Compare inodes to detect it.
+            SDIR=`$DIRNAME $tstfile`
+            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+            INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+                $CP -f $tstfile $BLDDIR
+                if [ $? -ne 0 ]; then
+                    echo "Error: FAILED to copy $tstfile."
+
+                    # Comment out this exit to CREATE the expected file.
+                    exit $EXIT_FAILURE
+                fi
+            fi
+        fi
+    done
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+    # Skip the rm when srcdir is the same directory as destdir;
+    # $tstfile still holds the last file handled by the copy loops.
+    SDIR=`$DIRNAME $tstfile`
+    INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+    INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+    if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+        $RM $BLDDIR/*.txt
+        $RM $BLDDIR/*.out
+    fi
+}
+
+# Print a one-line message left-justified in a field of 70 characters,
+# beginning with the word "Testing".
+#
+TESTING() {
+    SPACES="                                                               "
+    echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+JAVAEXEFLAGS=@H5_JAVAFLAGS@
+
+###############################################################################
+#                DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-1.7.5.jar:"$BLDLIBDIR"/slf4j-simple-1.7.5.jar:"$TESTJARFILE""
+
+TEST=/usr/bin/test
+if [ ! -x /usr/bin/test ]
+then
+    TEST=`which test`
+fi
+
+if $TEST -z "$CLASSPATH"; then
+    CLASSPATH=""
+fi
+CLASSPATH=$CPATH":"$CLASSPATH
+export CLASSPATH
+
+if $TEST -n "$JAVAPATH" ; then
+    PATH=$JAVAPATH":"$PATH
+    export PATH
+fi
+
+if $TEST -e /bin/uname; then
+    os_name=`/bin/uname -s`
+elif $TEST -e /usr/bin/uname; then
+    os_name=`/usr/bin/uname -s`
+else
+    os_name=unknown
+fi
+
+if $TEST -z "$LD_LIBRARY_PATH" ; then
+    LD_LIBRARY_PATH=""
+fi
+
+case $os_name in
+    Darwin)
+        # Darwin looks up shared libraries through DYLD_LIBRARY_PATH
+        DYLD_LIBRARY_PATH=$BLDLIBDIR:$DYLD_LIBRARY_PATH
+        export DYLD_LIBRARY_PATH
+        LD_LIBRARY_PATH=$DYLD_LIBRARY_PATH
+        ;;
+    *)
+        LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
+        ;;
+esac
+
+export LD_LIBRARY_PATH
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateDataset"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateDataset > H5_CreateDataset.out)
+if diff H5_CreateDataset.out examples.intro.H5_CreateDataset.txt > /dev/null; then
+    echo "    PASSED      intro.H5_CreateDataset"
+else
+    echo "**FAILED**    intro.H5_CreateDataset"
+    nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateAttribute"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateAttribute > H5_CreateAttribute.out)
+if diff H5_CreateAttribute.out examples.intro.H5_CreateAttribute.txt > /dev/null; then
+    echo "    PASSED      intro.H5_CreateAttribute"
+else
+    echo "**FAILED**    intro.H5_CreateAttribute"
+    nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateFile"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateFile > H5_CreateFile.out)
+if diff H5_CreateFile.out examples.intro.H5_CreateFile.txt > /dev/null; then
+    echo "    PASSED      intro.H5_CreateFile"
+else
+    echo "**FAILED**    intro.H5_CreateFile"
+    nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroup"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroup > H5_CreateGroup.out)
+if diff H5_CreateGroup.out examples.intro.H5_CreateGroup.txt > /dev/null; then
+    echo "    PASSED      intro.H5_CreateGroup"
+else
+    echo "**FAILED**    intro.H5_CreateGroup"
+    nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroupAbsoluteRelative"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroupAbsoluteRelative > H5_CreateGroupAbsoluteRelative.out)
+if diff H5_CreateGroupAbsoluteRelative.out examples.intro.H5_CreateGroupAbsoluteRelative.txt > /dev/null; then
+    echo "    PASSED      intro.H5_CreateGroupAbsoluteRelative"
+else
+    echo "**FAILED**    intro.H5_CreateGroupAbsoluteRelative"
+    nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroupDataset"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroupDataset > H5_CreateGroupDataset.out)
+if diff H5_CreateGroupDataset.out examples.intro.H5_CreateGroupDataset.txt > /dev/null; then
+    echo "    PASSED      intro.H5_CreateGroupDataset"
+else
+    echo "**FAILED**    intro.H5_CreateGroupDataset"
+    nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_ReadWrite"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_ReadWrite > H5_ReadWrite.out)
+if diff H5_ReadWrite.out examples.intro.H5_ReadWrite.txt > /dev/null; then
+    echo "    PASSED      intro.H5_ReadWrite"
+else
+    echo "**FAILED**    intro.H5_ReadWrite"
+    nerrors="`expr $nerrors + 1`"
+fi
+
+# Clean up temporary files/directories
+CLEAN_LIBFILES_AND_BLDLIBDIR
+CLEAN_DATAFILES_AND_BLDDIR
+
+# Report test results and exit
+if test $nerrors -eq 0 ; then
+    echo "All $TESTNAME tests passed."
+    exit $EXIT_SUCCESS
+else
+    echo "$TESTNAME tests failed with $nerrors errors."
+    exit $EXIT_FAILURE
+fi
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Alloc.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Alloc.txt
new file mode 100644
index 0000000..6fd810b
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Alloc.txt
@@ -0,0 +1,16 @@
+Creating datasets...
+DS1 has allocation time H5D_ALLOC_TIME_LATE
+DS2 has allocation time H5D_ALLOC_TIME_EARLY
+
+Space for DS1 has not been allocated.
+Storage size for DS1 is: 0 bytes.
+Space for DS2 has been allocated.
+Storage size for DS2 is: 112 bytes.
+
+Writing data...
+
+Space for DS1 has been allocated.
+Storage size for DS1 is: 112 bytes.
+Space for DS2 has been allocated.
+Storage size for DS2 is: 112 bytes.
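The seven run blocks in runExample.sh.in above are identical except for the class they launch: run the example, capture its stdout into NAME.out, and diff that against the staged examples.intro.NAME.txt reference. The repetition could be collapsed into a single helper; the following is a minimal sketch reusing the script's own variables (run_example is a hypothetical name, not part of the committed script):

run_example() {
    pkgclass=$1    # fully qualified class, e.g. examples.intro.H5_CreateFile
    # the reference file is named after the full package path; the .out
    # file is named after the bare class name
    name=`echo $pkgclass | $AWK -F'.' '{print $NF}'`
    echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH $pkgclass"
    ($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH $pkgclass > $name.out)
    if diff $name.out $pkgclass.txt > /dev/null; then
        echo "    PASSED      $pkgclass"
    else
        echo "**FAILED**    $pkgclass"
        nerrors="`expr $nerrors + 1`"
    fi
}

run_example examples.intro.H5_CreateDataset
run_example examples.intro.H5_ReadWrite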
+ diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Checksum.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Checksum.txt new file mode 100644 index 0000000..676aebb --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Checksum.txt @@ -0,0 +1,3 @@ +Filter type is: H5Z_FILTER_FLETCHER32 + +Maximum value in DS1 is: 1890 diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Chunk.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Chunk.txt new file mode 100644 index 0000000..5f4c2de --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Chunk.txt @@ -0,0 +1,26 @@ +Original Data: + [ 1 1 1 1 1 1 1 1 ] + [ 1 1 1 1 1 1 1 1 ] + [ 1 1 1 1 1 1 1 1 ] + [ 1 1 1 1 1 1 1 1 ] + [ 1 1 1 1 1 1 1 1 ] + [ 1 1 1 1 1 1 1 1 ] + +Storage layout for DS1 is: H5D_CHUNKED + +Data as written to disk by hyberslabs: + [ 0 1 0 0 1 0 0 1 ] + [ 1 1 0 1 1 0 1 1 ] + [ 0 0 0 0 0 0 0 0 ] + [ 0 1 0 0 1 0 0 1 ] + [ 1 1 0 1 1 0 1 1 ] + [ 0 0 0 0 0 0 0 0 ] + +Data as read from disk by hyberslab: + [ 0 1 0 0 0 0 0 1 ] + [ 0 1 0 1 0 0 1 1 ] + [ 0 0 0 0 0 0 0 0 ] + [ 0 0 0 0 0 0 0 0 ] + [ 0 1 0 1 0 0 1 1 ] + [ 0 0 0 0 0 0 0 0 ] + diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Compact.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Compact.txt new file mode 100644 index 0000000..e34f3c1 --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Compact.txt @@ -0,0 +1,8 @@ +Storage layout for DS1 is: H5D_COMPACT + +Data for DS1 is: + [ 0 -1 -2 -3 -4 -5 -6 ] + [ 0 0 0 0 0 0 0 ] + [ 0 1 2 3 4 5 6 ] + [ 0 2 4 6 8 10 12 ] + diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_External.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_External.txt new file mode 100644 index 0000000..5878149 --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_External.txt @@ -0,0 +1,7 @@ +DS1 is stored in file: H5Ex_D_External.data +DS1: + [ 0 -1 -2 -3 -4 -5 -6 ] + [ 0 0 0 0 0 0 0 ] + [ 0 1 2 3 4 5 6 ] + [ 0 2 4 6 8 10 12 ] + diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_FillValue.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_FillValue.txt new file mode 100644 index 0000000..68d826b --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_FillValue.txt @@ -0,0 +1,20 @@ +Dataset before being written to: + [ 99 99 99 99 99 99 99 ] + [ 99 99 99 99 99 99 99 ] + [ 99 99 99 99 99 99 99 ] + [ 99 99 99 99 99 99 99 ] + +Dataset after being written to: + [ 0 -1 -2 -3 -4 -5 -6 ] + [ 0 0 0 0 0 0 0 ] + [ 0 1 2 3 4 5 6 ] + [ 0 2 4 6 8 10 12 ] + +Dataset after extension: + [ 0 -1 -2 -3 -4 -5 -6 99 99 99 ] + [ 0 0 0 0 0 0 0 99 99 99 ] + [ 0 1 2 3 4 5 6 99 99 99 ] + [ 0 2 4 6 8 10 12 99 99 99 ] + [ 99 99 99 99 99 99 99 99 99 99 ] + [ 99 99 99 99 99 99 99 99 99 99 ] + diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Gzip.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Gzip.txt new file mode 100644 index 0000000..255a561 --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Gzip.txt @@ -0,0 +1,3 @@ +Filter type is: H5Z_FILTER_DEFLATE + +Maximum value in DS1 is: 1890 diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Hyperslab.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Hyperslab.txt new file mode 100644 index 0000000..823dfcc --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Hyperslab.txt @@ -0,0 +1,24 @@ +Original Data: + [ 1 1 1 1 1 1 1 1 ] + [ 1 1 1 1 1 1 1 1 ] + [ 1 1 1 1 1 1 1 1 ] + [ 1 1 1 1 1 1 1 1 ] + [ 1 1 1 1 1 1 1 1 ] + [ 1 1 1 1 1 
1 1 1 ] + +Data as written to disk by hyberslabs: + [ 0 1 0 0 1 0 0 1 ] + [ 1 1 0 1 1 0 1 1 ] + [ 0 0 0 0 0 0 0 0 ] + [ 0 1 0 0 1 0 0 1 ] + [ 1 1 0 1 1 0 1 1 ] + [ 0 0 0 0 0 0 0 0 ] + +Data as read from disk by hyberslab: + [ 0 1 0 0 0 0 0 1 ] + [ 0 1 0 1 0 0 1 1 ] + [ 0 0 0 0 0 0 0 0 ] + [ 0 0 0 0 0 0 0 0 ] + [ 0 1 0 1 0 0 1 1 ] + [ 0 0 0 0 0 0 0 0 ] + diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Nbit.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Nbit.txt new file mode 100644 index 0000000..a768ba0 --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Nbit.txt @@ -0,0 +1,3 @@ +Filter type is: H5Z_FILTER_NBIT + +Maximum value in DS1 is: 1890 diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_ReadWrite.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_ReadWrite.txt new file mode 100644 index 0000000..e021029 --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_ReadWrite.txt @@ -0,0 +1,6 @@ +DS1: + [ 0 -1 -2 -3 -4 -5 -6 ] + [ 0 0 0 0 0 0 0 ] + [ 0 1 2 3 4 5 6 ] + [ 0 2 4 6 8 10 12 ] + diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Shuffle.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Shuffle.txt new file mode 100644 index 0000000..ea95f11 --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Shuffle.txt @@ -0,0 +1,5 @@ +Filter 0: Type is: H5Z_FILTER_SHUFFLE + +Filter 1: Type is: H5Z_FILTER_DEFLATE + +Maximum value in DS1 is: 1890 diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Sofloat.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Sofloat.txt new file mode 100644 index 0000000..4d4b5d6 --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Sofloat.txt @@ -0,0 +1,6 @@ +Maximum value in write buffer is: 106.66666666666667 +Minimum value in write buffer is: 1.7692307692307692 +Filter type is: H5Z_FILTER_SCALEOFFSET + +Maximum value in DS1 is: 106.66169811320755 +Minimum value in DS1 is: 1.7692307692307692 diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Soint.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Soint.txt new file mode 100644 index 0000000..48d0d8c --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Soint.txt @@ -0,0 +1,3 @@ +Filter type is: H5Z_FILTER_SCALEOFFSET + +Maximum value in DS1 is: 1890 diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Szip.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Szip.txt new file mode 100644 index 0000000..a1c0d19 --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Szip.txt @@ -0,0 +1,3 @@ +Filter type is: H5Z_FILTER_SZIP + +Maximum value in DS1 is: 1890 diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Transform.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Transform.txt new file mode 100644 index 0000000..05257bc --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Transform.txt @@ -0,0 +1,15 @@ +Original Data: + [ 0 -1 -2 -3 -4 -5 -6 ] + [ 0 0 0 0 0 0 0 ] + [ 0 1 2 3 4 5 6 ] + [ 0 2 4 6 8 10 12 ] +Data as written with transform 'x+1' + [ 1 0 -1 -2 -3 -4 -5 ] + [ 1 1 1 1 1 1 1 ] + [ 1 2 3 4 5 6 7 ] + [ 1 3 5 7 9 11 13 ] +Data as written with transform 'x+1' and read with transform 'x-1' + [ 0 -1 -2 -3 -4 -5 -6 ] + [ 0 0 0 0 0 0 0 ] + [ 0 1 2 3 4 5 6 ] + [ 0 2 4 6 8 10 12 ] diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt new file mode 100644 index 0000000..d3a7281 --- /dev/null +++ 
b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt @@ -0,0 +1,14 @@ +Dataset before extension: + [ 0 -1 -2 -3 -4 -5 -6 ] + [ 0 0 0 0 0 0 0 ] + [ 0 1 2 3 4 5 6 ] + [ 0 2 4 6 8 10 12 ] + +Dataset after extension: + [ 0 -1 -2 -3 -4 -5 -6 7 8 9 ] + [ 0 0 0 0 0 0 0 7 8 9 ] + [ 0 1 2 3 4 5 6 7 8 9 ] + [ 0 2 4 6 8 10 12 7 8 9 ] + [ 0 1 2 3 4 5 6 7 8 9 ] + [ 0 1 2 3 4 5 6 7 8 9 ] + diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt new file mode 100644 index 0000000..9e36281 --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt @@ -0,0 +1,16 @@ +Dataset before extension: + [ 0 -1 -2 -3 -4 -5 -6 ] + [ 0 0 0 0 0 0 0 ] + [ 0 1 2 3 4 5 6 ] + [ 0 2 4 6 8 10 12 ] + +Filter type is: H5Z_FILTER_DEFLATE + +Dataset after extension: + [ 0 -1 -2 -3 -4 -5 -6 7 8 9 ] + [ 0 0 0 0 0 0 0 7 8 9 ] + [ 0 1 2 3 4 5 6 7 8 9 ] + [ 0 2 4 6 8 10 12 7 8 9 ] + [ 0 1 2 3 4 5 6 7 8 9 ] + [ 0 1 2 3 4 5 6 7 8 9 ] + diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt new file mode 100644 index 0000000..15eee16 --- /dev/null +++ b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt @@ -0,0 +1,14 @@ +Dataset before extension: + [ 0 -1 -2 -3 -4 -5 -6 ] + [ 0 0 0 0 0 0 0 ] + [ 0 1 2 3 4 5 6 ] + [ 0 2 4 6 8 10 12 ] + +Dataset after extension: + [ 0 1 2 3 4 5 6 7 8 9 ] + [ 0 1 2 3 4 5 6 7 8 9 ] + [ 0 1 2 3 4 5 6 7 8 9 ] + [ 0 1 2 3 4 5 6 7 8 9 ] + [ 0 1 2 3 4 5 6 7 8 9 ] + [ 0 1 2 3 4 5 6 7 8 9 ] + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Array.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Array.txt new file mode 100644 index 0000000..7bcd8fa --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_Array.txt @@ -0,0 +1,21 @@ +DS1 [0]: + [0 0 0 0 0 ] + [0 -1 -2 -3 -4 ] + [0 -2 -4 -6 -8 ] + +DS1 [1]: + [0 1 2 3 4 ] + [1 1 1 1 1 ] + [2 1 0 -1 -2 ] + +DS1 [2]: + [0 2 4 6 8 ] + [2 3 4 5 6 ] + [4 4 4 4 4 ] + +DS1 [3]: + [0 3 6 9 12 ] + [3 5 7 9 11 ] + [6 7 8 9 10 ] + + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt new file mode 100644 index 0000000..7d27c0b --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt @@ -0,0 +1,21 @@ +A1 [0]: + [0 0 0 0 0 ] + [0 -1 -2 -3 -4 ] + [0 -2 -4 -6 -8 ] + +A1 [1]: + [0 1 2 3 4 ] + [1 1 1 1 1 ] + [2 1 0 -1 -2 ] + +A1 [2]: + [0 2 4 6 8 ] + [2 3 4 5 6 ] + [4 4 4 4 4 ] + +A1 [3]: + [0 3 6 9 12 ] + [3 5 7 9 11 ] + [6 7 8 9 10 ] + + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Bit.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Bit.txt new file mode 100644 index 0000000..57769b2 --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_Bit.txt @@ -0,0 +1,6 @@ +DS1: + [{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}{1, 0, 3, 3}{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}] + [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}] + [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}] + [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}] + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_BitAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_BitAttribute.txt new file mode 100644 index 0000000..683bc7f --- /dev/null +++ 
b/java/examples/testfiles/examples.datatypes.H5Ex_T_BitAttribute.txt @@ -0,0 +1,6 @@ +A1: + [{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}{1, 0, 3, 3}{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}] + [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}] + [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}] + [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}] + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Commit.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Commit.txt new file mode 100644 index 0000000..e6d0bef --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_Commit.txt @@ -0,0 +1,6 @@ +Named datatype: Sensor_Type: + Class: H5T_COMPOUND + Serial number + Location + Temperature (F) + Pressure (inHg) diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Compound.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Compound.txt new file mode 100644 index 0000000..0505c78 --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_Compound.txt @@ -0,0 +1,25 @@ +DS1 [0]: +Serial number : 1153 +Location : Exterior (static) +Temperature (F) : 53.23 +Pressure (inHg) : 24.57 + +DS1 [1]: +Serial number : 1184 +Location : Intake +Temperature (F) : 55.12 +Pressure (inHg) : 22.95 + +DS1 [2]: +Serial number : 1027 +Location : Intake manifold +Temperature (F) : 103.55 +Pressure (inHg) : 31.23 + +DS1 [3]: +Serial number : 1313 +Location : Exhaust manifold +Temperature (F) : 1252.89 +Pressure (inHg) : 84.11 + + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt new file mode 100644 index 0000000..dd77f8d --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt @@ -0,0 +1,25 @@ +A1 [0]: +Serial number : 1153 +Location : Exterior (static) +Temperature (F) : 53.23 +Pressure (inHg) : 24.57 + +A1 [1]: +Serial number : 1184 +Location : Intake +Temperature (F) : 55.12 +Pressure (inHg) : 22.95 + +A1 [2]: +Serial number : 1027 +Location : Intake manifold +Temperature (F) : 103.55 +Pressure (inHg) : 31.23 + +A1 [3]: +Serial number : 1313 +Location : Exhaust manifold +Temperature (F) : 1252.89 +Pressure (inHg) : 84.11 + + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Float.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Float.txt new file mode 100644 index 0000000..85d8ced --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_Float.txt @@ -0,0 +1,6 @@ +DS1: + [ 0.0000 1.0000 2.0000 3.0000 4.0000 5.0000 6.0000] + [ 2.0000 1.6667 2.4000 3.2857 4.2222 5.1818 6.1538] + [ 4.0000 2.3333 2.8000 3.5714 4.4444 5.3636 6.3077] + [ 6.0000 3.0000 3.2000 3.8571 4.6667 5.5455 6.4615] + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt new file mode 100644 index 0000000..cfa1f92 --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt @@ -0,0 +1,6 @@ +A1: + [ 0.0000 1.0000 2.0000 3.0000 4.0000 5.0000 6.0000] + [ 2.0000 1.6667 2.4000 3.2857 4.2222 5.1818 6.1538] + [ 4.0000 2.3333 2.8000 3.5714 4.4444 5.3636 6.3077] + [ 6.0000 3.0000 3.2000 3.8571 4.6667 5.5455 6.4615] + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Integer.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Integer.txt new file mode 100644 index 0000000..f686bd1 --- /dev/null +++ 
b/java/examples/testfiles/examples.datatypes.H5Ex_T_Integer.txt @@ -0,0 +1,6 @@ +DS1: + [ 0 -1 -2 -3 -4 -5 -6] + [ 0 0 0 0 0 0 0] + [ 0 1 2 3 4 5 6] + [ 0 2 4 6 8 10 12] + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt new file mode 100644 index 0000000..dccd4a6 --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt @@ -0,0 +1,6 @@ +A1: + [ 0 -1 -2 -3 -4 -5 -6] + [ 0 0 0 0 0 0 0] + [ 0 1 2 3 4 5 6] + [ 0 2 4 6 8 10 12] + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReference.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReference.txt new file mode 100644 index 0000000..827c042 --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReference.txt @@ -0,0 +1,4 @@ +DS1[0]: + ->H5G_GROUP: +DS1[1]: + ->H5G_DATASET: diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt new file mode 100644 index 0000000..fe3cdc0 --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt @@ -0,0 +1,4 @@ +A1[0]: + ->H5G_GROUP: +A1[1]: + ->H5G_DATASET: diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Opaque.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Opaque.txt new file mode 100644 index 0000000..fb74236 --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_Opaque.txt @@ -0,0 +1,6 @@ +Datatype tag for DS1 is: "Character array" +DS1[0]: OPAQUE0 +DS1[1]: OPAQUE1 +DS1[2]: OPAQUE2 +DS1[3]: OPAQUE3 + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt new file mode 100644 index 0000000..bc9a730 --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt @@ -0,0 +1,6 @@ +Datatype tag for A1 is: "Character array" +A1[0]: OPAQUE0 +A1[1]: OPAQUE1 +A1[2]: OPAQUE2 +A1[3]: OPAQUE3 + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_String.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_String.txt new file mode 100644 index 0000000..4df6a41 --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_String.txt @@ -0,0 +1,5 @@ +DS1 [0]: Parting +DS1 [1]: is such +DS1 [2]: sweet +DS1 [3]: sorrow. + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_StringAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_StringAttribute.txt new file mode 100644 index 0000000..4df6a41 --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_StringAttribute.txt @@ -0,0 +1,5 @@ +DS1 [0]: Parting +DS1 [1]: is such +DS1 [2]: sweet +DS1 [3]: sorrow. + diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_VLString.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_VLString.txt new file mode 100644 index 0000000..0322953 --- /dev/null +++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_VLString.txt @@ -0,0 +1,4 @@ +DS1 [0]: Parting +DS1 [1]: is such +DS1 [2]: sweet +DS1 [3]: sorrow. 
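The reference files above are compared byte-for-byte against each example's stdout (slf4j's SimpleLogger writes to stderr by default, so the redirect stays clean). If an example's output changes intentionally, its reference file has to be regenerated; a minimal sketch, assuming the CLASSPATH and library path already set up by the runner script:

$JAVAEXE $JAVAEXEFLAGS -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH \
    examples.datatypes.H5Ex_T_VLString > examples.datatypes.H5Ex_T_VLString.txt

The regenerated file would then be copied back under java/examples/testfiles/.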
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Compact.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Compact.txt new file mode 100644 index 0000000..0a88d3f --- /dev/null +++ b/java/examples/testfiles/examples.groups.H5Ex_G_Compact.txt @@ -0,0 +1,5 @@ +Group storage type for H5Ex_G_Compact1.h5 is: H5G_STORAGE_TYPE_SYMBOL_TABLE +File size for H5Ex_G_Compact1.h5 is: 1832 bytes + +Group storage type for H5Ex_G_Compact2.h5 is: H5G_STORAGE_TYPE_COMPACT +File size for H5Ex_G_Compact2.h5 is: 342 bytes diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Corder.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Corder.txt new file mode 100644 index 0000000..2d959fc --- /dev/null +++ b/java/examples/testfiles/examples.groups.H5Ex_G_Corder.txt @@ -0,0 +1,10 @@ +Traversing group using alphabetical indices: +Index 0: 5 +Index 1: D +Index 2: F +Index 3: H +Traversing group using creation order indices: +Index 0: H +Index 1: D +Index 2: F +Index 3: 5 diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Create.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Create.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/java/examples/testfiles/examples.groups.H5Ex_G_Create.txt diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Intermediate.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Intermediate.txt new file mode 100644 index 0000000..65a0fc2 --- /dev/null +++ b/java/examples/testfiles/examples.groups.H5Ex_G_Intermediate.txt @@ -0,0 +1,5 @@ +Objects in the file_id: +/ (Group) +/G1 (Group) +/G1/G2 (Group) +/G1/G2/G3 (Group) diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Iterate.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Iterate.txt new file mode 100644 index 0000000..66a4ae9 --- /dev/null +++ b/java/examples/testfiles/examples.groups.H5Ex_G_Iterate.txt @@ -0,0 +1,5 @@ +Objects in root group: + Dataset: DS1 + Datatype: DT1 + Group: G1 + Dataset: L1 diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Phase.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Phase.txt new file mode 100644 index 0000000..9e666d4 --- /dev/null +++ b/java/examples/testfiles/examples.groups.H5Ex_G_Phase.txt @@ -0,0 +1,15 @@ +1 Group : Storage type is H5G_STORAGE_TYPE_COMPACT +2 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT +3 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT +4 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT +5 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT +6 Groups: Storage type is H5G_STORAGE_TYPE_DENSE +7 Groups: Storage type is H5G_STORAGE_TYPE_DENSE + +6 Groups: Storage type is H5G_STORAGE_TYPE_DENSE +5 Groups: Storage type is H5G_STORAGE_TYPE_DENSE +4 Groups: Storage type is H5G_STORAGE_TYPE_DENSE +3 Groups: Storage type is H5G_STORAGE_TYPE_DENSE +2 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT +1 Group : Storage type is H5G_STORAGE_TYPE_COMPACT +0 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Visit.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Visit.txt new file mode 100644 index 0000000..126a588 --- /dev/null +++ b/java/examples/testfiles/examples.groups.H5Ex_G_Visit.txt @@ -0,0 +1,19 @@ +Objects in the file: +/ (Group) +/group1 (Group) +/group1/dset1 (Dataset) +/group1/group3 (Group) +/group1/group3/group4 (Group) +/group1/group3/group4/group1 (Group) +/group1/group3/group4/group2 (Group) + +Links in the file: +/group1 (Group) +/group1/dset1 (Dataset) +/group1/group3 (Group) +/group1/group3/dset2 (Dataset) 
+/group1/group3/group4 (Group) +/group1/group3/group4/group1 (Group) +/group1/group3/group4/group1/group5 (Group) +/group1/group3/group4/group2 (Group) +/group2 (Group) diff --git a/java/examples/testfiles/examples.intro.H5_CreateAttribute.txt b/java/examples/testfiles/examples.intro.H5_CreateAttribute.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/java/examples/testfiles/examples.intro.H5_CreateAttribute.txt diff --git a/java/examples/testfiles/examples.intro.H5_CreateDataset.txt b/java/examples/testfiles/examples.intro.H5_CreateDataset.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/java/examples/testfiles/examples.intro.H5_CreateDataset.txt diff --git a/java/examples/testfiles/examples.intro.H5_CreateFile.txt b/java/examples/testfiles/examples.intro.H5_CreateFile.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/java/examples/testfiles/examples.intro.H5_CreateFile.txt diff --git a/java/examples/testfiles/examples.intro.H5_CreateGroup.txt b/java/examples/testfiles/examples.intro.H5_CreateGroup.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/java/examples/testfiles/examples.intro.H5_CreateGroup.txt diff --git a/java/examples/testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt b/java/examples/testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/java/examples/testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt diff --git a/java/examples/testfiles/examples.intro.H5_CreateGroupDataset.txt b/java/examples/testfiles/examples.intro.H5_CreateGroupDataset.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/java/examples/testfiles/examples.intro.H5_CreateGroupDataset.txt diff --git a/java/examples/testfiles/examples.intro.H5_ReadWrite.txt b/java/examples/testfiles/examples.intro.H5_ReadWrite.txt new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/java/examples/testfiles/examples.intro.H5_ReadWrite.txt |
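Putting the pieces together: with the Java interface enabled, the automake build compiles each example into its jar, and make check drives the generated runExample.sh scripts against the reference files above. A sketch for the autotools build (the configure flag name is an assumption; check configure --help):

./configure --enable-java
make
make check    # runs each generated runExample.sh and diffs against the .txt references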