path: root/HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedGzip.java
Diffstat (limited to 'HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedGzip.java')
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedGzip.java  504
1 file changed, 504 insertions, 0 deletions
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedGzip.java b/HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedGzip.java
new file mode 100644
index 0000000..d15bbf8
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedGzip.java
@@ -0,0 +1,504 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create and extend an unlimited
+ dataset with gzip compression. The program first writes
+ integers to a gzip-compressed dataset with dataspace
+ dimensions of DIM_X x DIM_Y, then closes the file. Next, it
+ reopens the file, reads back the data, outputs it to the
+ screen, extends the dataset, and writes new data to the
+ extended portions of the dataset. Finally, it reopens the
+ file again, reads back the data, and outputs it to the
+ screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_UnlimitedGzip {
+ private static final String FILENAME = "H5Ex_D_UnlimitedGzip.h5";
+ private static final String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Filter identifiers recognized by the HDF5 library
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static
+ {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int code) { this.code = code; }
+
+ public int getCode() { return this.code; }
+
+ public static H5Z_filter get(int code) { return lookup.get(code); }
+ }
+
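+ // Verify that the gzip (deflate) filter is available and enabled for both encoding and decoding.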
+ private static boolean checkGzipFilter()
+ {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (available == 0) {
+ System.out.println("gzip filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("gzip filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
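+ // Create the file and write a gzip-compressed, extendible dataset to it.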
+ private static void writeUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ long[] maxdims = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace with unlimited dimensions.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the gzip compression
+ // filter.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
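+ // Request gzip (deflate) compression at level 9; levels range from 0 (no compression) to 9 (maximum).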
+ H5.H5Pset_deflate(dcpl_id, 9);
+ // Set the chunk size; chunked storage is required both for compression and for extendible datasets.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the unlimited dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
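+ // Terminate access to the dataspace.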
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
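+ // Terminate access to the dataset creation property list.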
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
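+ // Reopen the file, display the current data, then extend the dataset and write to the new region.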
+ private static void extendUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] extdims = {EDIM_X, EDIM_Y};
+ long[] start = {0, 0};
+ long[] count = new long[2];
+ int[][] dset_data;
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the dataspace of the dataset so that the current
+ // dimensions can be retrieved and the read buffer sized
+ // accordingly.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate the two-dimensional read buffer.
+ dset_data = new int[(int)dims[0]][(int)dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset before extension:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
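+ // Close the dataspace; it will be reacquired after the dataset is extended.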
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Extend the dataset.
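+ // Only chunked datasets can be extended, and the new extent
+ // must stay within the maximum dimensions set at creation time.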
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dset_extent(dataset_id, extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataspace for the newly extended dataset.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Initialize data for writing to the extended dataset.
+ for (int indx = 0; indx < EDIM_X; indx++)
+ for (int jndx = 0; jndx < EDIM_Y; jndx++)
+ extend_dset_data[indx][jndx] = jndx;
+
+ // Select the entire dataspace.
+ try {
+ if (dataspace_id >= 0) {
+ H5.H5Sselect_all(dataspace_id);
+
+ // Subtract a hyperslab reflecting the original dimensions from the
+ // selection. The selection now contains only the newly extended
+ // portions of the dataset.
+ count[0] = dims[0];
+ count[1] = dims[1];
+ H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_NOTB, start, null, count, null);
+
+ // Write the data to the selected portion of the dataset.
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
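+ // Terminate access to the dataspace.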
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
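+ // Reopen the file, report the compression filter in use, and read back the extended dataset.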
+ private static void readUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
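+ // Retrieve the first (and only) filter; 120 is the maximum filter name length to return.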
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for the read buffer as before.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Allocate the two-dimensional read buffer.
+ dset_data = new int[(int)dims[0]][(int)dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after extension:");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < dims[1]; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
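+ // Terminate access to the dataspace.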
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ // Check if gzip compression is available and can be used for both
+ // compression and decompression. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+ // optional part of the HDF5 library.
+ if (H5Ex_D_UnlimitedGzip.checkGzipFilter()) {
+ H5Ex_D_UnlimitedGzip.writeUnlimited();
+ H5Ex_D_UnlimitedGzip.extendUnlimited();
+ H5Ex_D_UnlimitedGzip.readUnlimited();
+ }
+ }
+}