author     Allen Byrne <50328838+byrnHDF@users.noreply.github.com>  2022-04-19 18:08:09 (GMT)
committer  GitHub <noreply@github.com>  2022-04-19 18:08:09 (GMT)
commit     03c3a5469542688a52d5a20242d2334408f8ba33 (patch)
tree       46e30ea4aa99b68279fe80c9ad56b1a6bbfcfe7c /java/examples
parent     32ef796e470da3e4de364d8dc469b03f5f6fafdc (diff)
Develop clang format java (#1653)
Diffstat (limited to 'java/examples')
-rw-r--r--  java/examples/datasets/CMakeLists.txt | 7
-rw-r--r--  java/examples/datasets/H5Ex_D_Alloc.java | 66
-rw-r--r--  java/examples/datasets/H5Ex_D_Checksum.java | 100
-rw-r--r--  java/examples/datasets/H5Ex_D_Chunk.java | 101
-rw-r--r--  java/examples/datasets/H5Ex_D_Compact.java | 66
-rw-r--r--  java/examples/datasets/H5Ex_D_External.java | 51
-rw-r--r--  java/examples/datasets/H5Ex_D_FillValue.java | 73
-rw-r--r--  java/examples/datasets/H5Ex_D_Gzip.java | 105
-rw-r--r--  java/examples/datasets/H5Ex_D_Hyperslab.java | 70
-rw-r--r--  java/examples/datasets/H5Ex_D_Nbit.java | 113
-rw-r--r--  java/examples/datasets/H5Ex_D_ReadWrite.java | 41
-rw-r--r--  java/examples/datasets/H5Ex_D_Shuffle.java | 112
-rw-r--r--  java/examples/datasets/H5Ex_D_Sofloat.java | 114
-rw-r--r--  java/examples/datasets/H5Ex_D_Soint.java | 110
-rw-r--r--  java/examples/datasets/H5Ex_D_Szip.java | 107
-rw-r--r--  java/examples/datasets/H5Ex_D_Transform.java | 55
-rw-r--r--  java/examples/datasets/H5Ex_D_UnlimitedAdd.java | 85
-rw-r--r--  java/examples/datasets/H5Ex_D_UnlimitedGzip.java | 138
-rw-r--r--  java/examples/datasets/H5Ex_D_UnlimitedMod.java | 81
-rw-r--r--  java/examples/datatypes/CMakeLists.txt | 7
-rw-r--r--  java/examples/datatypes/H5Ex_T_Array.java | 63
-rw-r--r--  java/examples/datatypes/H5Ex_T_ArrayAttribute.java | 67
-rw-r--r--  java/examples/datatypes/H5Ex_T_Bit.java | 51
-rw-r--r--  java/examples/datatypes/H5Ex_T_BitAttribute.java | 52
-rw-r--r--  java/examples/datatypes/H5Ex_T_Commit.java | 100
-rw-r--r--  java/examples/datatypes/H5Ex_T_Compound.java | 152
-rw-r--r--  java/examples/datatypes/H5Ex_T_CompoundAttribute.java | 154
-rw-r--r--  java/examples/datatypes/H5Ex_T_Float.java | 51
-rw-r--r--  java/examples/datatypes/H5Ex_T_FloatAttribute.java | 54
-rw-r--r--  java/examples/datatypes/H5Ex_T_Integer.java | 47
-rw-r--r--  java/examples/datatypes/H5Ex_T_IntegerAttribute.java | 48
-rw-r--r--  java/examples/datatypes/H5Ex_T_ObjectReference.java | 144
-rw-r--r--  java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java | 160
-rw-r--r--  java/examples/datatypes/H5Ex_T_Opaque.java | 59
-rw-r--r--  java/examples/datatypes/H5Ex_T_OpaqueAttribute.java | 59
-rw-r--r--  java/examples/datatypes/H5Ex_T_String.java | 71
-rw-r--r--  java/examples/datatypes/H5Ex_T_StringAttribute.java | 73
-rw-r--r--  java/examples/datatypes/H5Ex_T_VLString.java | 50
-rw-r--r--  java/examples/groups/CMakeLists.txt | 7
-rw-r--r--  java/examples/groups/H5Ex_G_Compact.java | 52
-rw-r--r--  java/examples/groups/H5Ex_G_Corder.java | 56
-rw-r--r--  java/examples/groups/H5Ex_G_Create.java | 19
-rw-r--r--  java/examples/groups/H5Ex_G_Intermediate.java | 42
-rw-r--r--  java/examples/groups/H5Ex_G_Iterate.java | 52
-rw-r--r--  java/examples/groups/H5Ex_G_Phase.java | 73
-rw-r--r--  java/examples/groups/H5Ex_G_Traverse.java | 45
-rw-r--r--  java/examples/groups/H5Ex_G_Visit.java | 53
-rw-r--r--  java/examples/intro/CMakeLists.txt | 7
-rw-r--r--  java/examples/intro/H5_CreateAttribute.java | 38
-rw-r--r--  java/examples/intro/H5_CreateDataset.java | 28
-rw-r--r--  java/examples/intro/H5_CreateFile.java | 11
-rw-r--r--  java/examples/intro/H5_CreateGroup.java | 19
-rw-r--r--  java/examples/intro/H5_CreateGroupAbsoluteRelative.java | 27
-rw-r--r--  java/examples/intro/H5_CreateGroupDataset.java | 68
-rw-r--r--  java/examples/intro/H5_ReadWrite.java | 35
55 files changed, 1924 insertions(+), 1765 deletions(-)
diff --git a/java/examples/datasets/CMakeLists.txt b/java/examples/datasets/CMakeLists.txt
index 8198135..e63ead0 100644
--- a/java/examples/datasets/CMakeLists.txt
+++ b/java/examples/datasets/CMakeLists.txt
@@ -62,6 +62,13 @@ foreach (example ${HDF_JAVA_EXAMPLES})
# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
+
+ #-----------------------------------------------------------------------------
+ # Add Target to clang-format
+ #-----------------------------------------------------------------------------
+ if (HDF5_ENABLE_FORMATTERS)
+ clang_format (HDF5_JAVA_${example}_SRC_FORMAT ${example}.java)
+ endif ()
endforeach ()
set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
diff --git a/java/examples/datasets/H5Ex_D_Alloc.java b/java/examples/datasets/H5Ex_D_Alloc.java
index 4e10c23..4853cc0 100644
--- a/java/examples/datasets/H5Ex_D_Alloc.java
+++ b/java/examples/datasets/H5Ex_D_Alloc.java
@@ -29,49 +29,47 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_Alloc {
- private static String FILENAME = "H5Ex_D_Alloc.h5";
+ private static String FILENAME = "H5Ex_D_Alloc.h5";
private static String DATASETNAME1 = "DS1";
private static String DATASETNAME2 = "DS2";
- private static final int DIM_X = 4;
- private static final int DIM_Y = 7;
- private static final int FILLVAL = 99;
- private static final int RANK = 2;
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int FILLVAL = 99;
+ private static final int RANK = 2;
// Values for the status of space allocation
enum H5D_space_status {
- H5D_SPACE_STATUS_ERROR(-1), H5D_SPACE_STATUS_NOT_ALLOCATED(0), H5D_SPACE_STATUS_PART_ALLOCATED(1), H5D_SPACE_STATUS_ALLOCATED(
- 2);
+ H5D_SPACE_STATUS_ERROR(-1),
+ H5D_SPACE_STATUS_NOT_ALLOCATED(0),
+ H5D_SPACE_STATUS_PART_ALLOCATED(1),
+ H5D_SPACE_STATUS_ALLOCATED(2);
private static final Map<Integer, H5D_space_status> lookup = new HashMap<Integer, H5D_space_status>();
- static {
+ static
+ {
for (H5D_space_status s : EnumSet.allOf(H5D_space_status.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5D_space_status(int space_status) {
- this.code = space_status;
- }
+ H5D_space_status(int space_status) { this.code = space_status; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5D_space_status get(int code) {
- return lookup.get(code);
- }
+ public static H5D_space_status get(int code) { return lookup.get(code); }
}
- private static void allocation() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void allocation()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id1 = HDF5Constants.H5I_INVALID_HID;
- long dataset_id2 = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
+ long dataset_id1 = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id2 = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
int[][] dset_data = new int[DIM_X][DIM_Y];
- int space_status = 0;
+ int space_status = 0;
long storage_size = 0;
// Initialize the dataset.
@@ -82,7 +80,7 @@ public class H5Ex_D_Alloc {
// Create a file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -125,7 +123,8 @@ public class H5Ex_D_Alloc {
try {
if ((file_id >= 0) && (filespace_id >= 0))
dataset_id1 = H5.H5Dcreate(file_id, DATASETNAME1, HDF5Constants.H5T_NATIVE_INT, filespace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -135,7 +134,7 @@ public class H5Ex_D_Alloc {
try {
if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
dataset_id2 = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_NATIVE_INT, filespace_id,
- HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -190,16 +189,16 @@ public class H5Ex_D_Alloc {
// Write the data to the datasets.
try {
if (dataset_id1 >= 0)
- H5.H5Dwrite(dataset_id1, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data[0]);
+ H5.H5Dwrite(dataset_id1, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data[0]);
}
catch (Exception e) {
e.printStackTrace();
}
try {
if (dataset_id2 >= 0)
- H5.H5Dwrite(dataset_id2, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data[0]);
+ H5.H5Dwrite(dataset_id2, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data[0]);
}
catch (Exception e) {
e.printStackTrace();
@@ -291,8 +290,5 @@ public class H5Ex_D_Alloc {
}
}
- public static void main(String[] args) {
- H5Ex_D_Alloc.allocation();
- }
-
+ public static void main(String[] args) { H5Ex_D_Alloc.allocation(); }
}
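The enum reflowed above is an instance of the code-lookup pattern these examples share: each constant stores its native integer code, and a static map recovers the constant from a code returned by the library. A standalone sketch of the pattern, formatted the way clang-format leaves it in this commit (class and constant names here are illustrative, not part of the commit):

    import java.util.EnumSet;
    import java.util.HashMap;
    import java.util.Map;

    public class LookupEnumSketch {
        // Mirrors the H5D_space_status pattern: constant <-> integer code.
        enum SpaceStatus {
            ERROR(-1),
            NOT_ALLOCATED(0),
            PART_ALLOCATED(1),
            ALLOCATED(2);

            private static final Map<Integer, SpaceStatus> lookup = new HashMap<>();
            static
            {
                for (SpaceStatus s : EnumSet.allOf(SpaceStatus.class))
                    lookup.put(s.getCode(), s);
            }

            private final int code;

            SpaceStatus(int code) { this.code = code; }

            public int getCode() { return this.code; }

            // Reverse mapping: native int code -> constant.
            public static SpaceStatus get(int code) { return lookup.get(code); }
        }

        public static void main(String[] args)
        {
            System.out.println(SpaceStatus.get(2)); // prints ALLOCATED
        }
    }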
diff --git a/java/examples/datasets/H5Ex_D_Checksum.java b/java/examples/datasets/H5Ex_D_Checksum.java
index 781dd68..7b01176 100644
--- a/java/examples/datasets/H5Ex_D_Checksum.java
+++ b/java/examples/datasets/H5Ex_D_Checksum.java
@@ -30,42 +30,46 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_Checksum {
- private static String FILENAME = "H5Ex_D_Checksum.h5";
+ private static String FILENAME = "H5Ex_D_Checksum.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM_X = 32;
- private static final int DIM_Y = 64;
- private static final int CHUNK_X = 4;
- private static final int CHUNK_Y = 8;
- private static final int RANK = 2;
- private static final int NDIMS = 2;
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
// Values for the status of space allocation
enum H5Z_filter {
- H5Z_FILTER_ERROR(-1), H5Z_FILTER_NONE(0), H5Z_FILTER_DEFLATE(1), H5Z_FILTER_SHUFFLE(2), H5Z_FILTER_FLETCHER32(3), H5Z_FILTER_SZIP(
- 4), H5Z_FILTER_NBIT(5), H5Z_FILTER_SCALEOFFSET(6), H5Z_FILTER_RESERVED(256), H5Z_FILTER_MAX(65535);
+ H5Z_FILTER_ERROR(-1),
+ H5Z_FILTER_NONE(0),
+ H5Z_FILTER_DEFLATE(1),
+ H5Z_FILTER_SHUFFLE(2),
+ H5Z_FILTER_FLETCHER32(3),
+ H5Z_FILTER_SZIP(4),
+ H5Z_FILTER_NBIT(5),
+ H5Z_FILTER_SCALEOFFSET(6),
+ H5Z_FILTER_RESERVED(256),
+ H5Z_FILTER_MAX(65535);
private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
- static {
+ static
+ {
for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5Z_filter(int layout_type) {
- this.code = layout_type;
- }
+ H5Z_filter(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5Z_filter get(int code) {
- return lookup.get(code);
- }
+ public static H5Z_filter get(int code) { return lookup.get(code); }
}
- private static boolean checkFletcher32Filter() {
+ private static boolean checkFletcher32Filter()
+ {
try {
int available = H5.H5Zfilter_avail(H5Z_filter.H5Z_FILTER_FLETCHER32.getCode());
if (available == 0) {
@@ -79,8 +83,8 @@ public class H5Ex_D_Checksum {
try {
int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_FLETCHER32);
- if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
- || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
System.out.println("N-Bit filter not available for encoding and decoding.");
return false;
}
@@ -91,13 +95,14 @@ public class H5Ex_D_Checksum {
return true;
}
- private static void writeChecksum() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void writeChecksum()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize data.
@@ -108,7 +113,7 @@ public class H5Ex_D_Checksum {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -140,7 +145,7 @@ public class H5Ex_D_Checksum {
try {
if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
- HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -149,8 +154,8 @@ public class H5Ex_D_Checksum {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -191,10 +196,11 @@ public class H5Ex_D_Checksum {
}
}
- private static void readChecksum() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ private static void readChecksum()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
int[][] dset_data = new int[DIM_X][DIM_Y];
// Open an existing file.
@@ -228,14 +234,14 @@ public class H5Ex_D_Checksum {
try {
if (dcpl_id >= 0) {
// Java lib requires a valid filter_name object and cd_values
- int[] flags = { 0 };
- long[] cd_nelmts = { 1 };
- int[] cd_values = { 0 };
- String[] filter_name = { "" };
- int[] filter_config = { 0 };
- int filter_type = -1;
- filter_type = H5
- .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
System.out.print("Filter type is: ");
switch (H5Z_filter.get(filter_type)) {
case H5Z_FILTER_DEFLATE:
@@ -264,7 +270,7 @@ public class H5Ex_D_Checksum {
try {
if (dataset_id >= 0) {
int status = H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
- HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
// Check if the read was successful. Normally we do not perform
// error checking in these examples for the sake of clarity, but in
// this case we will make an exception because this is how the
@@ -328,7 +334,8 @@ public class H5Ex_D_Checksum {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
// Check if the Fletcher32 filter is available and can be used for
// both encoding and decoding. Normally we do not perform error
// checking in these examples for the sake of clarity, but in this
@@ -340,5 +347,4 @@ public class H5Ex_D_Checksum {
H5Ex_D_Checksum.readChecksum();
}
}
-
}
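The H5Pget_filter call rewrapped above also shows how this Java binding returns several values at once: each out-parameter is a one-element array. A sketch of that idiom, assuming dcpl_id is a dataset creation property list obtained from H5.H5Dget_create_plist:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class FilterInfoSketch {
        // Prints the first filter registered on a dataset creation property list.
        static void printFirstFilter(long dcpl_id) throws Exception
        {
            // The Java binding returns results through one-element arrays.
            int[] flags          = {0};  // filter flags (e.g. optional)
            long[] cd_nelmts     = {1};  // in: capacity of cd_values; out: number used
            int[] cd_values      = {0};  // client data for the filter
            String[] filter_name = {""}; // out: filter name (up to 120 chars here)
            int[] filter_config  = {0};  // encoder/decoder availability bits

            int filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120,
                                               filter_name, filter_config);
            if (filter_type == HDF5Constants.H5Z_FILTER_FLETCHER32)
                System.out.println("First filter: Fletcher32 checksum");
            else
                System.out.println("First filter id: " + filter_type);
        }
    }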
diff --git a/java/examples/datasets/H5Ex_D_Chunk.java b/java/examples/datasets/H5Ex_D_Chunk.java
index 2ddf293..fbfc148 100644
--- a/java/examples/datasets/H5Ex_D_Chunk.java
+++ b/java/examples/datasets/H5Ex_D_Chunk.java
@@ -30,47 +30,48 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_Chunk {
- private static String FILENAME = "H5Ex_D_Chunk.h5";
+ private static String FILENAME = "H5Ex_D_Chunk.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM_X = 6;
- private static final int DIM_Y = 8;
- private static final int CHUNK_X = 4;
- private static final int CHUNK_Y = 4;
- private static final int RANK = 2;
- private static final int NDIMS = 2;
+ private static final int DIM_X = 6;
+ private static final int DIM_Y = 8;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
// Values for the status of space allocation
enum H5D_layout {
- H5D_LAYOUT_ERROR(-1), H5D_COMPACT(0), H5D_CONTIGUOUS(1), H5D_CHUNKED(2), H5D_VIRTUAL(3), H5D_NLAYOUTS(4);
+ H5D_LAYOUT_ERROR(-1),
+ H5D_COMPACT(0),
+ H5D_CONTIGUOUS(1),
+ H5D_CHUNKED(2),
+ H5D_VIRTUAL(3),
+ H5D_NLAYOUTS(4);
private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();
- static {
+ static
+ {
for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5D_layout(int layout_type) {
- this.code = layout_type;
- }
+ H5D_layout(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5D_layout get(int code) {
- return lookup.get(code);
- }
+ public static H5D_layout get(int code) { return lookup.get(code); }
}
- private static void writeChunk() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void writeChunk()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize data to "1", to make it easier to see the selections.
@@ -91,7 +92,7 @@ public class H5Ex_D_Chunk {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -127,20 +128,21 @@ public class H5Ex_D_Chunk {
try {
if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
- HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
// Define and select the first part of the hyperslab selection.
- long[] start = { 0, 0 };
- long[] stride = { 3, 3 };
- long[] count = { 2, 3 };
- long[] block = { 2, 2 };
+ long[] start = {0, 0};
+ long[] stride = {3, 3};
+ long[] count = {2, 3};
+ long[] block = {2, 2};
try {
if ((filespace_id >= 0))
- H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count,
+ block);
}
catch (Exception e) {
e.printStackTrace();
@@ -152,12 +154,13 @@ public class H5Ex_D_Chunk {
block[1] = 1;
try {
if ((filespace_id >= 0)) {
- H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count, block);
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count,
+ block);
// Write the data to the dataset.
if (dataset_id >= 0)
H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
}
}
catch (Exception e) {
@@ -199,11 +202,12 @@ public class H5Ex_D_Chunk {
}
}
- private static void readChunk() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void readChunk()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
int[][] dset_data = new int[DIM_X][DIM_Y];
// Open an existing file.
@@ -267,8 +271,8 @@ public class H5Ex_D_Chunk {
// Read the data using the default properties.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -294,18 +298,19 @@ public class H5Ex_D_Chunk {
if (dataset_id >= 0) {
filespace_id = H5.H5Dget_space(dataset_id);
- long[] start = { 0, 1 };
- long[] stride = { 4, 4 };
- long[] count = { 2, 2 };
- long[] block = { 2, 3 };
+ long[] start = {0, 1};
+ long[] stride = {4, 4};
+ long[] count = {2, 2};
+ long[] block = {2, 3};
if (filespace_id >= 0) {
- H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count,
+ block);
// Read the data using the previously defined hyperslab.
if ((dataset_id >= 0) && (filespace_id >= 0))
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ filespace_id, HDF5Constants.H5P_DEFAULT, dset_data);
}
}
}
@@ -358,9 +363,9 @@ public class H5Ex_D_Chunk {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_D_Chunk.writeChunk();
H5Ex_D_Chunk.readChunk();
}
-
}
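The hyperslab calls rewrapped above pair a file-space selection with an H5S_ALL memory space, which makes the memory dataspace mirror the file dataspace and its selection. A condensed sketch of the write path using the selection values visible in the hunk (the second, H5S_SELECT_NOTB setup is mostly elided above, so it is only noted in a comment):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class HyperslabWriteSketch {
        // Writes only the selected elements of a 6x8 int buffer, following
        // H5Ex_D_Chunk. dataset_id and filespace_id are assumed to be open
        // handles from the surrounding example.
        static void writeSelection(long dataset_id, long filespace_id, int[][] dset_data)
            throws Exception
        {
            // A 2x3 grid of 2x2 blocks, stride 3 in each dimension.
            long[] start  = {0, 0};
            long[] stride = {3, 3};
            long[] count  = {2, 3};
            long[] block  = {2, 2};
            H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride,
                                   count, block);
            // The full example then shrinks this selection with a second call
            // using HDF5Constants.H5S_SELECT_NOTB (values elided in the hunk).

            // H5S_ALL makes the memory dataspace mirror the file dataspace and
            // its selection, so only the selected buffer elements are written.
            H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                        filespace_id, HDF5Constants.H5P_DEFAULT, dset_data);
        }
    }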
diff --git a/java/examples/datasets/H5Ex_D_Compact.java b/java/examples/datasets/H5Ex_D_Compact.java
index 0abf8da..3a60283 100644
--- a/java/examples/datasets/H5Ex_D_Compact.java
+++ b/java/examples/datasets/H5Ex_D_Compact.java
@@ -27,43 +27,44 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_Compact {
- private static String FILENAME = "H5Ex_D_Compact.h5";
+ private static String FILENAME = "H5Ex_D_Compact.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM_X = 4;
- private static final int DIM_Y = 7;
- private static final int RANK = 2;
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int RANK = 2;
// Values for the status of space allocation
enum H5D_layout {
- H5D_LAYOUT_ERROR(-1), H5D_COMPACT(0), H5D_CONTIGUOUS(1), H5D_CHUNKED(2), H5D_VIRTUAL(3), H5D_NLAYOUTS(4);
+ H5D_LAYOUT_ERROR(-1),
+ H5D_COMPACT(0),
+ H5D_CONTIGUOUS(1),
+ H5D_CHUNKED(2),
+ H5D_VIRTUAL(3),
+ H5D_NLAYOUTS(4);
private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();
- static {
+ static
+ {
for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5D_layout(int layout_type) {
- this.code = layout_type;
- }
+ H5D_layout(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5D_layout get(int code) {
- return lookup.get(code);
- }
+ public static H5D_layout get(int code) { return lookup.get(code); }
}
- private static void writeCompact() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void writeCompact()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize data.
@@ -74,7 +75,7 @@ public class H5Ex_D_Compact {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -110,7 +111,7 @@ public class H5Ex_D_Compact {
try {
if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
- HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -119,8 +120,8 @@ public class H5Ex_D_Compact {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -161,11 +162,12 @@ public class H5Ex_D_Compact {
}
}
- private static void readCompact() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void readCompact()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
int[][] dset_data = new int[DIM_X][DIM_Y];
// Open file and dataset using the default properties.
@@ -229,8 +231,8 @@ public class H5Ex_D_Compact {
// Read the data using the default properties.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -281,9 +283,9 @@ public class H5Ex_D_Compact {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_D_Compact.writeCompact();
H5Ex_D_Compact.readCompact();
}
-
}
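The hunks above rewrap the compact-layout example but elide the dcpl setup itself. A sketch of how a compact dataset creation property list is typically built; the H5Pset_layout call is an assumption based on the example's name, not shown in this diff:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class CompactLayoutSketch {
        // Requests compact layout, where the raw data lives in the object
        // header rather than in separate file blocks (suits tiny datasets).
        static long compactDcpl() throws Exception
        {
            long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            H5.H5Pset_layout(dcpl_id, HDF5Constants.H5D_COMPACT); // assumed call
            return dcpl_id;
        }
    }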
diff --git a/java/examples/datasets/H5Ex_D_External.java b/java/examples/datasets/H5Ex_D_External.java
index 9c3787f..d706fb7 100644
--- a/java/examples/datasets/H5Ex_D_External.java
+++ b/java/examples/datasets/H5Ex_D_External.java
@@ -24,20 +24,21 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_External {
- private static String FILENAME = "H5Ex_D_External.h5";
- private static String EXTERNALNAME = "H5Ex_D_External.data";
- private static String DATASETNAME = "DS1";
- private static final int DIM_X = 4;
- private static final int DIM_Y = 7;
- private static final int RANK = 2;
+ private static String FILENAME = "H5Ex_D_External.h5";
+ private static String EXTERNALNAME = "H5Ex_D_External.data";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int RANK = 2;
private static final int NAME_BUF_SIZE = 32;
- private static void writeExternal() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ private static void writeExternal()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize the dataset.
@@ -48,7 +49,7 @@ public class H5Ex_D_External {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -84,7 +85,7 @@ public class H5Ex_D_External {
try {
if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
- HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -92,8 +93,8 @@ public class H5Ex_D_External {
// Write the dataset.
try {
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -133,15 +134,15 @@ public class H5Ex_D_External {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void readExternal() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ private static void readExternal()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
int[][] dset_data = new int[DIM_X][DIM_Y];
- String[] Xname = new String[1];
+ String[] Xname = new String[1];
// Open file using the default properties.
try {
@@ -183,8 +184,8 @@ public class H5Ex_D_External {
// Read the data using the default properties.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -227,9 +228,9 @@ public class H5Ex_D_External {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_D_External.writeExternal();
H5Ex_D_External.readExternal();
}
-
}
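H5Ex_D_External stores its raw data in the separate file named by EXTERNALNAME. The H5Pset_external call is elided in the hunks above; this sketch shows the usual shape of that setup, with illustrative sizes (4 x 7 ints of 4 bytes each):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class ExternalStorageSketch {
        // Routes a dataset's raw data to an external file. The byte count is
        // an assumption matching a 4x7 native-int dataset, not from the diff.
        static long externalDcpl() throws Exception
        {
            long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            // arguments: external file name, byte offset, bytes reserved
            H5.H5Pset_external(dcpl_id, "H5Ex_D_External.data", 0, 4 * 7 * 4);
            return dcpl_id;
        }
    }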
diff --git a/java/examples/datasets/H5Ex_D_FillValue.java b/java/examples/datasets/H5Ex_D_FillValue.java
index 3526993..db4dff7 100644
--- a/java/examples/datasets/H5Ex_D_FillValue.java
+++ b/java/examples/datasets/H5Ex_D_FillValue.java
@@ -26,29 +26,30 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_FillValue {
- private static String FILENAME = "H5Ex_D_FillValue.h5";
+ private static String FILENAME = "H5Ex_D_FillValue.h5";
private static String DATASETNAME = "ExtendibleArray";
- private static final int DIM_X = 4;
- private static final int DIM_Y = 7;
- private static final int EDIM_X = 6;
- private static final int EDIM_Y = 10;
- private static final int CHUNK_X = 4;
- private static final int CHUNK_Y = 4;
- private static final int RANK = 2;
- private static final int NDIMS = 2;
- private static final int FILLVAL = 99;
-
- private static void fillValue() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] extdims = { EDIM_X, EDIM_Y };
- long[] chunk_dims = { CHUNK_X, CHUNK_Y };
- long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
- int[][] write_dset_data = new int[DIM_X][DIM_Y];
- int[][] read_dset_data = new int[DIM_X][DIM_Y];
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+ private static final int FILLVAL = 99;
+
+ private static void fillValue()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] extdims = {EDIM_X, EDIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ long[] maxdims = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
+ int[][] write_dset_data = new int[DIM_X][DIM_Y];
+ int[][] read_dset_data = new int[DIM_X][DIM_Y];
int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
// Initialize the dataset.
@@ -59,7 +60,7 @@ public class H5Ex_D_FillValue {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -92,7 +93,7 @@ public class H5Ex_D_FillValue {
// Set the fill value for the dataset
try {
- int[] fill_value = { FILLVAL };
+ int[] fill_value = {FILLVAL};
if (dcpl_id >= 0)
H5.H5Pset_fill_value(dcpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
}
@@ -115,7 +116,7 @@ public class H5Ex_D_FillValue {
try {
if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -124,8 +125,8 @@ public class H5Ex_D_FillValue {
// Read values from the dataset, which has not been written to yet.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, read_dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, read_dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -144,8 +145,8 @@ public class H5Ex_D_FillValue {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, write_dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, write_dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -154,8 +155,8 @@ public class H5Ex_D_FillValue {
// Read the data back.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, read_dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, read_dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -183,8 +184,8 @@ public class H5Ex_D_FillValue {
// Read from the extended dataset.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, extend_dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -233,11 +234,7 @@ public class H5Ex_D_FillValue {
catch (Exception e) {
e.printStackTrace();
}
-
- }
-
- public static void main(String[] args) {
- H5Ex_D_FillValue.fillValue();
}
+ public static void main(String[] args) { H5Ex_D_FillValue.fillValue(); }
}
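The H5Pset_fill_value call visible above takes the fill value as a one-element array, like the other in/out parameters in this binding; any element the dataset later exposes without having been written reads back as that value. A sketch isolating that step:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class FillValueSketch {
        // Sets an integer fill value of 99 on a dataset creation property
        // list, mirroring the H5Pset_fill_value call in the diff above.
        static void setFill(long dcpl_id) throws Exception
        {
            int[] fill_value = {99}; // one-element array, per the Java binding
            H5.H5Pset_fill_value(dcpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
        }
    }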
diff --git a/java/examples/datasets/H5Ex_D_Gzip.java b/java/examples/datasets/H5Ex_D_Gzip.java
index 404ff05..0a94254 100644
--- a/java/examples/datasets/H5Ex_D_Gzip.java
+++ b/java/examples/datasets/H5Ex_D_Gzip.java
@@ -29,45 +29,46 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_Gzip {
- private static String FILENAME = "H5Ex_D_Gzip.h5";
+ private static String FILENAME = "H5Ex_D_Gzip.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM_X = 32;
- private static final int DIM_Y = 64;
- private static final int CHUNK_X = 4;
- private static final int CHUNK_Y = 8;
- private static final int RANK = 2;
- private static final int NDIMS = 2;
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
// Values for the status of space allocation
enum H5Z_filter {
- H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE(
- HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32(
- HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT(
- HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED(
- HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
- static {
+ static
+ {
for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5Z_filter(int layout_type) {
- this.code = layout_type;
- }
+ H5Z_filter(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5Z_filter get(int code) {
- return lookup.get(code);
- }
+ public static H5Z_filter get(int code) { return lookup.get(code); }
}
- private static boolean checkGzipFilter() {
+ private static boolean checkGzipFilter()
+ {
try {
int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
if (available == 0) {
@@ -81,8 +82,8 @@ public class H5Ex_D_Gzip {
try {
int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
- if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
- || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
System.out.println("gzip filter not available for encoding and decoding.");
return false;
}
@@ -93,13 +94,14 @@ public class H5Ex_D_Gzip {
return true;
}
- private static void writeGzip() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void writeGzip()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize data.
@@ -110,7 +112,7 @@ public class H5Ex_D_Gzip {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -143,7 +145,7 @@ public class H5Ex_D_Gzip {
try {
if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
- HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -152,8 +154,8 @@ public class H5Ex_D_Gzip {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -194,10 +196,11 @@ public class H5Ex_D_Gzip {
}
}
- private static void readGzip() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ private static void readGzip()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
int[][] dset_data = new int[DIM_X][DIM_Y];
// Open an existing file.
@@ -231,14 +234,14 @@ public class H5Ex_D_Gzip {
try {
if (dcpl_id >= 0) {
// Java lib requires a valid filter_name object and cd_values
- int[] flags = { 0 };
- long[] cd_nelmts = { 1 };
- int[] cd_values = { 0 };
- String[] filter_name = { "" };
- int[] filter_config = { 0 };
- int filter_type = -1;
- filter_type = H5
- .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
System.out.print("Filter type is: ");
switch (H5Z_filter.get(filter_type)) {
case H5Z_FILTER_DEFLATE:
@@ -272,8 +275,8 @@ public class H5Ex_D_Gzip {
// Read the data using the default properties.
try {
if (dataset_id >= 0) {
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
}
catch (Exception e) {
@@ -318,7 +321,8 @@ public class H5Ex_D_Gzip {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
// Check if gzip compression is available and can be used for both
// compression and decompression. Normally we do not perform error
// checking in these examples for the sake of clarity, but in this
@@ -329,5 +333,4 @@ public class H5Ex_D_Gzip {
H5Ex_D_Gzip.readGzip();
}
}
-
}
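The availability check rewrapped above gates the whole example on both the encode and decode halves of the deflate filter. A sketch of that check plus the property-list setup that enables gzip; the H5Pset_deflate call is an assumption, since the hunk that enables compression is not shown:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class GzipCheckSketch {
        // True only if deflate is present and usable for both encoding and
        // decoding, following the check in the diff above.
        static boolean gzipUsable() throws Exception
        {
            if (H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE) == 0)
                return false;
            int info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
            return ((info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) != 0) &&
                   ((info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) != 0);
        }

        // Assumed setup: filters apply per chunk, so chunking is required.
        static long gzipDcpl(long[] chunk_dims) throws Exception
        {
            long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            H5.H5Pset_deflate(dcpl_id, 9);           // compression level 0-9
            H5.H5Pset_chunk(dcpl_id, 2, chunk_dims); // rank 2, per the example
            return dcpl_id;
        }
    }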
diff --git a/java/examples/datasets/H5Ex_D_Hyperslab.java b/java/examples/datasets/H5Ex_D_Hyperslab.java
index fa3473f..0575d50 100644
--- a/java/examples/datasets/H5Ex_D_Hyperslab.java
+++ b/java/examples/datasets/H5Ex_D_Hyperslab.java
@@ -26,17 +26,18 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_Hyperslab {
- private static String FILENAME = "H5Ex_D_Hyperslab.h5";
+ private static String FILENAME = "H5Ex_D_Hyperslab.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM_X = 6;
- private static final int DIM_Y = 8;
- private static final int RANK = 2;
+ private static final int DIM_X = 6;
+ private static final int DIM_Y = 8;
+ private static final int RANK = 2;
- private static void writeHyperslab() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void writeHyperslab()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize data to "1", to make it easier to see the selections.
@@ -57,7 +58,7 @@ public class H5Ex_D_Hyperslab {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -76,20 +77,22 @@ public class H5Ex_D_Hyperslab {
try {
if ((file_id >= 0) && (filespace_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
// Define and select the first part of the hyperslab selection.
- long[] start = { 0, 0 };
- long[] stride = { 3, 3 };
- long[] count = { 2, 3 };
- long[] block = { 2, 2 };
+ long[] start = {0, 0};
+ long[] stride = {3, 3};
+ long[] count = {2, 3};
+ long[] block = {2, 2};
try {
if ((filespace_id >= 0))
- H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count,
+ block);
}
catch (Exception e) {
e.printStackTrace();
@@ -101,12 +104,13 @@ public class H5Ex_D_Hyperslab {
block[1] = 1;
try {
if ((filespace_id >= 0)) {
- H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count, block);
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count,
+ block);
// Write the data to the dataset.
if (dataset_id >= 0)
H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
}
}
catch (Exception e) {
@@ -140,11 +144,12 @@ public class H5Ex_D_Hyperslab {
}
}
- private static void readHyperslab() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void readHyperslab()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
int[][] dset_data = new int[DIM_X][DIM_Y];
// Open an existing file.
@@ -167,8 +172,8 @@ public class H5Ex_D_Hyperslab {
// Read the data using the default properties.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -194,18 +199,19 @@ public class H5Ex_D_Hyperslab {
if (dataset_id >= 0) {
filespace_id = H5.H5Dget_space(dataset_id);
- long[] start = { 0, 1 };
- long[] stride = { 4, 4 };
- long[] count = { 2, 2 };
- long[] block = { 2, 3 };
+ long[] start = {0, 1};
+ long[] stride = {4, 4};
+ long[] count = {2, 2};
+ long[] block = {2, 3};
if (filespace_id >= 0) {
- H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count,
+ block);
// Read the data using the previously defined hyperslab.
if ((dataset_id >= 0) && (filespace_id >= 0))
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ filespace_id, HDF5Constants.H5P_DEFAULT, dset_data);
}
}
}
@@ -258,9 +264,9 @@ public class H5Ex_D_Hyperslab {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_D_Hyperslab.writeHyperslab();
H5Ex_D_Hyperslab.readHyperslab();
}
-
}
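The read path rewrapped above re-derives the file dataspace from the dataset and selects a hyperslab before reading. A condensed sketch using the selection values visible in the hunk:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class HyperslabReadSketch {
        // Reads only a hyperslab of an existing dataset into dset_data,
        // condensing the read path of H5Ex_D_Hyperslab.
        static void readSelection(long dataset_id, int[][] dset_data) throws Exception
        {
            long filespace_id = H5.H5Dget_space(dataset_id);
            long[] start  = {0, 1};
            long[] stride = {4, 4};
            long[] count  = {2, 2};
            long[] block  = {2, 3};
            H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride,
                                   count, block);
            // H5S_ALL memory space mirrors the file dataspace and its selection.
            H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                       filespace_id, HDF5Constants.H5P_DEFAULT, dset_data);
            H5.H5Sclose(filespace_id);
        }
    }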
diff --git a/java/examples/datasets/H5Ex_D_Nbit.java b/java/examples/datasets/H5Ex_D_Nbit.java
index 35d23a9..d54ce21 100644
--- a/java/examples/datasets/H5Ex_D_Nbit.java
+++ b/java/examples/datasets/H5Ex_D_Nbit.java
@@ -29,45 +29,46 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_Nbit {
- private static String FILENAME = "H5Ex_D_Nbit.h5";
+ private static String FILENAME = "H5Ex_D_Nbit.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM_X = 32;
- private static final int DIM_Y = 64;
- private static final int CHUNK_X = 4;
- private static final int CHUNK_Y = 8;
- private static final int RANK = 2;
- private static final int NDIMS = 2;
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
// Values for the status of space allocation
enum H5Z_filter {
- H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE(
- HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32(
- HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT(
- HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED(
- HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
- static {
+ static
+ {
for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5Z_filter(int layout_type) {
- this.code = layout_type;
- }
+ H5Z_filter(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5Z_filter get(int code) {
- return lookup.get(code);
- }
+ public static H5Z_filter get(int code) { return lookup.get(code); }
}
- private static boolean checkNbitFilter() {
+ private static boolean checkNbitFilter()
+ {
try {
// Check if N-Bit compression is available and can be used for both compression and decompression.
int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_NBIT);
@@ -82,8 +83,8 @@ public class H5Ex_D_Nbit {
try {
int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_NBIT);
- if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
- || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
System.out.println("N-Bit filter not available for encoding and decoding.");
return false;
}
@@ -94,14 +95,15 @@ public class H5Ex_D_Nbit {
return true;
}
- private static void writeData() throws Exception {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void writeData() throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dtype_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize data.
@@ -112,7 +114,7 @@ public class H5Ex_D_Nbit {
try {
// Create a new file using the default properties.
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
// Create dataspace. Setting maximum size to NULL sets the maximum
// size to be the current size.
@@ -130,12 +132,12 @@ public class H5Ex_D_Nbit {
H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
// Create the dataset.
- dataset_id = H5.H5Dcreate(file_id, DATASETNAME, dtype_id, filespace_id, HDF5Constants.H5P_DEFAULT, dcpl_id,
- HDF5Constants.H5P_DEFAULT);
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, dtype_id, filespace_id, HDF5Constants.H5P_DEFAULT,
+ dcpl_id, HDF5Constants.H5P_DEFAULT);
// Write the data to the dataset.
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -155,10 +157,11 @@ public class H5Ex_D_Nbit {
}
}
- private static void readData() throws Exception {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ private static void readData() throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
int[][] dset_data = new int[DIM_X][DIM_Y];
// Open an existing file.
@@ -192,14 +195,14 @@ public class H5Ex_D_Nbit {
try {
if (dcpl_id >= 0) {
// Java lib requires a valid filter_name object and cd_values
- int[] flags = { 0 };
- long[] cd_nelmts = { 1 };
- int[] cd_values = { 0 };
- String[] filter_name = { "" };
- int[] filter_config = { 0 };
- int filter_type = -1;
- filter_type = H5
- .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
System.out.print("Filter type is: ");
switch (H5Z_filter.get(filter_type)) {
case H5Z_FILTER_DEFLATE:
@@ -234,7 +237,7 @@ public class H5Ex_D_Nbit {
try {
if (dataset_id >= 0) {
int status = H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
- HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
// Check if the read was successful.
if (status < 0)
System.out.print("Dataset read failed!");
@@ -280,14 +283,14 @@ public class H5Ex_D_Nbit {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
/*
- * Check if N-Bit compression is available and can be used for both compression and decompression. Normally we
- * do not perform error checking in these examples for the sake of clarity, but in this case we will make an
- * exception because this filter is an optional part of the hdf5 library.
+ * Check if N-Bit compression is available and can be used for both compression and decompression.
+ * Normally we do not perform error checking in these examples for the sake of clarity, but in this
+ * case we will make an exception because this filter is an optional part of the hdf5 library.
*/
try {
if (H5Ex_D_Nbit.checkNbitFilter()) {
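The N-bit hunks above cut off before the datatype and property-list setup. As a sketch, the usual N-bit recipe copies a native type, narrows its precision, and enables the filter on a chunked dcpl; the specific calls and the 16-bit precision are assumptions, not taken from this diff:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class NbitSketch {
        // N-bit stores only the significant bits of each element, so the
        // datatype's precision is shrunk before the filter is enabled.
        static long[] nbitTypeAndDcpl(long[] chunk_dims) throws Exception
        {
            long dtype_id = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_INT);
            H5.H5Tset_precision(dtype_id, 16); // keep 16 of 32 bits (illustrative)

            long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            H5.H5Pset_nbit(dcpl_id);                 // filters require chunking
            H5.H5Pset_chunk(dcpl_id, 2, chunk_dims);
            return new long[] {dtype_id, dcpl_id};
        }
    }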
diff --git a/java/examples/datasets/H5Ex_D_ReadWrite.java b/java/examples/datasets/H5Ex_D_ReadWrite.java
index db930d3..4b26a2c 100644
--- a/java/examples/datasets/H5Ex_D_ReadWrite.java
+++ b/java/examples/datasets/H5Ex_D_ReadWrite.java
@@ -24,17 +24,18 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_ReadWrite {
- private static String FILENAME = "H5Ex_D_ReadWrite.h5";
+ private static String FILENAME = "H5Ex_D_ReadWrite.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM_X = 4;
- private static final int DIM_Y = 7;
- private static final int RANK = 2;
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int RANK = 2;
- private static void WriteDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void WriteDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize data.
@@ -45,7 +46,7 @@ public class H5Ex_D_ReadWrite {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -64,7 +65,8 @@ public class H5Ex_D_ReadWrite {
try {
if ((file_id >= 0) && (filespace_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -73,8 +75,8 @@ public class H5Ex_D_ReadWrite {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -107,9 +109,10 @@ public class H5Ex_D_ReadWrite {
}
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
int[][] dset_data = new int[DIM_X][DIM_Y];
// Open file using the default properties.
@@ -132,8 +135,8 @@ public class H5Ex_D_ReadWrite {
// Read the data using the default properties.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -168,9 +171,9 @@ public class H5Ex_D_ReadWrite {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_D_ReadWrite.WriteDataset();
H5Ex_D_ReadWrite.ReadDataset();
}
-
}
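
Setting the formatting churn aside, this file is the baseline all the filter examples build on, and the whole round trip reduces to a handful of calls. A condensed sketch with the example's try/catch scaffolding removed (file name illustrative):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class ReadWriteSketch {
        public static void main(String[] args) throws Exception {
            int[][] out = new int[4][7], in = new int[4][7];
            long file  = H5.H5Fcreate("rw_sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                                      HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            long space = H5.H5Screate_simple(2, new long[] {4, 7}, null);
            long dset  = H5.H5Dcreate(file, "DS1", HDF5Constants.H5T_STD_I32LE, space,
                                      HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
                                      HDF5Constants.H5P_DEFAULT);
            // Stored little-endian (H5T_STD_I32LE); written and read through the native view.
            H5.H5Dwrite(dset, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                        HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, out);
            H5.H5Dread(dset, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                       HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, in);
            H5.H5Dclose(dset); H5.H5Sclose(space); H5.H5Fclose(file);
        }
    }
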
diff --git a/java/examples/datasets/H5Ex_D_Shuffle.java b/java/examples/datasets/H5Ex_D_Shuffle.java
index 1dd7c6a..54a77c7 100644
--- a/java/examples/datasets/H5Ex_D_Shuffle.java
+++ b/java/examples/datasets/H5Ex_D_Shuffle.java
@@ -30,45 +30,46 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_Shuffle {
- private static String FILENAME = "H5Ex_D_Shuffle.h5";
+ private static String FILENAME = "H5Ex_D_Shuffle.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM_X = 32;
- private static final int DIM_Y = 64;
- private static final int CHUNK_X = 4;
- private static final int CHUNK_Y = 8;
- private static final int RANK = 2;
- private static final int NDIMS = 2;
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
// Values for the status of space allocation
enum H5Z_filter {
- H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE(
- HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32(
- HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT(
- HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED(
- HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
- static {
+ static
+ {
for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5Z_filter(int layout_type) {
- this.code = layout_type;
- }
+ H5Z_filter(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5Z_filter get(int code) {
- return lookup.get(code);
- }
+ public static H5Z_filter get(int code) { return lookup.get(code); }
}
- private static boolean checkGzipFilter() {
+ private static boolean checkGzipFilter()
+ {
try {
int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
if (available == 0) {
@@ -82,8 +83,8 @@ public class H5Ex_D_Shuffle {
try {
int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
- if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
- || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
System.out.println("gzip filter not available for encoding and decoding.");
return false;
}
@@ -94,7 +95,8 @@ public class H5Ex_D_Shuffle {
return true;
}
- private static boolean checkShuffleFilter() {
+ private static boolean checkShuffleFilter()
+ {
try {
int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SHUFFLE);
if (available == 0) {
@@ -108,8 +110,8 @@ public class H5Ex_D_Shuffle {
try {
int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SHUFFLE);
- if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
- || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
System.out.println("Shuffle filter not available for encoding and decoding.");
return false;
}
@@ -120,13 +122,14 @@ public class H5Ex_D_Shuffle {
return true;
}
- private static void writeShuffle() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void writeShuffle()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize data.
@@ -137,7 +140,7 @@ public class H5Ex_D_Shuffle {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -176,7 +179,7 @@ public class H5Ex_D_Shuffle {
try {
if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
- HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -185,8 +188,8 @@ public class H5Ex_D_Shuffle {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -227,10 +230,11 @@ public class H5Ex_D_Shuffle {
}
}
- private static void readShuffle() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ private static void readShuffle()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
int[][] dset_data = new int[DIM_X][DIM_Y];
// Open an existing file.
@@ -266,14 +270,14 @@ public class H5Ex_D_Shuffle {
int nfilters = H5.H5Pget_nfilters(dcpl_id);
for (int indx = 0; indx < nfilters; indx++) {
// Java lib requires a valid filter_name object and cd_values
- int[] flags = { 0 };
- long[] cd_nelmts = { 1 };
- int[] cd_values = { 0 };
- String[] filter_name = { "" };
- int[] filter_config = { 0 };
- int filter_type = -1;
- filter_type = H5.H5Pget_filter(dcpl_id, indx, flags, cd_nelmts, cd_values, 120, filter_name,
- filter_config);
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+ filter_type = H5.H5Pget_filter(dcpl_id, indx, flags, cd_nelmts, cd_values, 120,
+ filter_name, filter_config);
System.out.print("Filter " + indx + ": Type is: ");
switch (H5Z_filter.get(filter_type)) {
case H5Z_FILTER_DEFLATE:
@@ -308,8 +312,8 @@ public class H5Ex_D_Shuffle {
// Read the data using the default properties.
try {
if (dataset_id >= 0) {
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
}
catch (Exception e) {
@@ -354,7 +358,8 @@ public class H5Ex_D_Shuffle {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
// Check if gzip compression is available and can be used for both
// compression and decompression. Normally we do not perform error
// checking in these examples for the sake of clarity, but in this
@@ -366,5 +371,4 @@ public class H5Ex_D_Shuffle {
H5Ex_D_Shuffle.readShuffle();
}
}
-
}
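
Behind the reflow, the substance of H5Ex_D_Shuffle is filter ordering: shuffle only reorders bytes, so it helps exactly when a compressor runs after it, and filters execute in the order they are added to the creation property list. A minimal sketch of that pipeline (chunk sizes illustrative):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class ShuffleSketch {
        public static void main(String[] args) throws Exception {
            long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            H5.H5Pset_shuffle(dcpl);                     // first: group bytes by significance
            H5.H5Pset_deflate(dcpl, 9);                  // second: gzip the reordered stream
            H5.H5Pset_chunk(dcpl, 2, new long[] {4, 8}); // chunked layout is mandatory
            System.out.println("filters in pipeline: " + H5.H5Pget_nfilters(dcpl)); // prints 2
            H5.H5Pclose(dcpl);
        }
    }
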
diff --git a/java/examples/datasets/H5Ex_D_Sofloat.java b/java/examples/datasets/H5Ex_D_Sofloat.java
index 8edde09..a5f5cd8 100644
--- a/java/examples/datasets/H5Ex_D_Sofloat.java
+++ b/java/examples/datasets/H5Ex_D_Sofloat.java
@@ -23,9 +23,9 @@ package examples.datasets;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
-import java.util.Locale;
import java.util.EnumSet;
import java.util.HashMap;
+import java.util.Locale;
import java.util.Map;
import hdf.hdf5lib.H5;
@@ -33,45 +33,46 @@ import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_Sofloat {
- private static String FILENAME = "H5Ex_D_Sofloat.h5";
+ private static String FILENAME = "H5Ex_D_Sofloat.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM_X = 32;
- private static final int DIM_Y = 64;
- private static final int CHUNK_X = 4;
- private static final int CHUNK_Y = 8;
- private static final int RANK = 2;
- private static final int NDIMS = 2;
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
// Values for the status of space allocation
enum H5Z_filter {
- H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE(
- HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32(
- HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT(
- HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED(
- HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
- static {
+ static
+ {
for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5Z_filter(int layout_type) {
- this.code = layout_type;
- }
+ H5Z_filter(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5Z_filter get(int code) {
- return lookup.get(code);
- }
+ public static H5Z_filter get(int code) { return lookup.get(code); }
}
- private static boolean checkScaleoffsetFilter() {
+ private static boolean checkScaleoffsetFilter()
+ {
try {
int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
if (available == 0) {
@@ -85,8 +86,8 @@ public class H5Ex_D_Sofloat {
try {
int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
- if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
- || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
System.out.println("Scale-Offset filter not available for encoding and decoding.");
return false;
}
@@ -97,20 +98,21 @@ public class H5Ex_D_Sofloat {
return true;
}
- private static void writeData() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ private static void writeData()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
double[][] dset_data = new double[DIM_X][DIM_Y];
// Initialize data.
for (int indx = 0; indx < DIM_X; indx++)
for (int jndx = 0; jndx < DIM_Y; jndx++) {
- double x = indx;
- double y = jndx;
+ double x = indx;
+ double y = jndx;
dset_data[indx][jndx] = (x + 1) / (y + 0.3) + y;
}
@@ -133,7 +135,7 @@ public class H5Ex_D_Sofloat {
// Create a new file using the default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -164,7 +166,7 @@ public class H5Ex_D_Sofloat {
try {
if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_IEEE_F64LE, filespace_id,
- HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -173,8 +175,8 @@ public class H5Ex_D_Sofloat {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -215,10 +217,11 @@ public class H5Ex_D_Sofloat {
}
}
- private static void readData() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ private static void readData()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
double[][] dset_data = new double[DIM_X][DIM_Y];
// Open file using the default properties.
@@ -251,15 +254,15 @@ public class H5Ex_D_Sofloat {
try {
if (dcpl_id >= 0) {
// Java lib requires a valid filter_name object and cd_values
- int[] flags = { 0 };
- long[] cd_nelmts = { 1 };
- int[] cd_values = { 0 };
- String[] filter_name = { "" };
- int[] filter_config = { 0 };
- int filter_type = -1;
-
- filter_type = H5
- .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
System.out.print("Filter type is: ");
switch (H5Z_filter.get(filter_type)) {
case H5Z_FILTER_DEFLATE:
@@ -293,8 +296,8 @@ public class H5Ex_D_Sofloat {
// Read the data using the default properties.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -343,7 +346,8 @@ public class H5Ex_D_Sofloat {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
// Check if Scale-Offset compression is available and can be used
// for both compression and decompression. Normally we do not
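
The call this example revolves around sits outside the hunks shown here: the Scale-Offset filter is attached with H5Pset_scaleoffset, and for floating-point data it is lossy past the requested number of decimal digits. A sketch of the core configuration (the D-scale factor of 2 mirrors the C counterpart and is an assumption here):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class SofloatSketch {
        public static void main(String[] args) throws Exception {
            long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            // D-scaling: keep 2 decimal digits; values are rescaled and truncated
            // to integers before packing, so anything finer is lost.
            H5.H5Pset_scaleoffset(dcpl, HDF5Constants.H5Z_SO_FLOAT_DSCALE, 2);
            H5.H5Pset_chunk(dcpl, 2, new long[] {4, 8});
            H5.H5Pclose(dcpl);
        }
    }
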
diff --git a/java/examples/datasets/H5Ex_D_Soint.java b/java/examples/datasets/H5Ex_D_Soint.java
index dd7664f..3eb8e37 100644
--- a/java/examples/datasets/H5Ex_D_Soint.java
+++ b/java/examples/datasets/H5Ex_D_Soint.java
@@ -30,45 +30,46 @@ import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_Soint {
- private static String FILENAME = "H5Ex_D_Soint.h5";
+ private static String FILENAME = "H5Ex_D_Soint.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM_X = 32;
- private static final int DIM_Y = 64;
- private static final int CHUNK_X = 4;
- private static final int CHUNK_Y = 8;
- private static final int RANK = 2;
- private static final int NDIMS = 2;
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
// Values for the status of space allocation
enum H5Z_filter {
- H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE(
- HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32(
- HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT(
- HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED(
- HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
- static {
+ static
+ {
for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5Z_filter(int layout_type) {
- this.code = layout_type;
- }
+ H5Z_filter(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5Z_filter get(int code) {
- return lookup.get(code);
- }
+ public static H5Z_filter get(int code) { return lookup.get(code); }
}
- private static boolean checkScaleoffsetFilter() {
+ private static boolean checkScaleoffsetFilter()
+ {
try {
int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
if (available == 0) {
@@ -82,8 +83,8 @@ public class H5Ex_D_Soint {
try {
int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
- if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
- || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
System.out.println("Scale-Offset filter not available for encoding and decoding.");
return false;
}
@@ -94,13 +95,14 @@ public class H5Ex_D_Soint {
return true;
}
- private static void writeData() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void writeData()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize data.
@@ -111,7 +113,7 @@ public class H5Ex_D_Soint {
// Create a new file using the default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -130,7 +132,8 @@ public class H5Ex_D_Soint {
try {
dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
if (dcpl_id >= 0) {
- H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_INT, HDF5Constants.H5Z_SO_INT_MINBITS_DEFAULT);
+ H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_INT,
+ HDF5Constants.H5Z_SO_INT_MINBITS_DEFAULT);
H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
}
}
@@ -142,7 +145,7 @@ public class H5Ex_D_Soint {
try {
if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
- HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -151,8 +154,8 @@ public class H5Ex_D_Soint {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -193,10 +196,11 @@ public class H5Ex_D_Soint {
}
}
- private static void readData() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ private static void readData()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
int[][] dset_data = new int[DIM_X][DIM_Y];
// Open file using the default properties.
@@ -229,15 +233,15 @@ public class H5Ex_D_Soint {
try {
if (dcpl_id >= 0) {
// Java lib requires a valid filter_name object and cd_values
- int[] flags = { 0 };
- long[] cd_nelmts = { 1 };
- int[] cd_values = { 0 };
- String[] filter_name = { "" };
- int[] filter_config = { 0 };
- int filter_type = -1;
-
- filter_type = H5
- .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
System.out.print("Filter type is: ");
switch (H5Z_filter.get(filter_type)) {
case H5Z_FILTER_DEFLATE:
@@ -271,8 +275,8 @@ public class H5Ex_D_Soint {
// Read the data using the default properties.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -316,7 +320,8 @@ public class H5Ex_D_Soint {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
// Check if Scale-Offset compression is available and can be used
// for both compression and decompression. Normally we do not
@@ -328,5 +333,4 @@ public class H5Ex_D_Soint {
H5Ex_D_Soint.readData();
}
}
-
}
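
The read-side pattern shared by all of these filter examples deserves a note, since the recurring "Java lib requires a valid filter_name object" comment is easy to miss in the churn: H5Pget_filter reports its results through its array arguments, so every out-parameter must be a pre-allocated one-element array. A standalone sketch of the introspection loop:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class FilterProbeSketch {
        // Print the id and name of every filter on a creation property list.
        static void probe(long dcpl) throws Exception {
            for (int i = 0; i < H5.H5Pget_nfilters(dcpl); i++) {
                int[] flags          = {0};  // out: H5Z_FLAG_* bits
                long[] cd_nelmts     = {1};  // in/out: client-data slots provided/used
                int[] cd_values      = {0};  // out: filter client data
                String[] filter_name = {""}; // out: human-readable filter name
                int[] filter_config  = {0};  // out: encode/decode capability bits
                int id = H5.H5Pget_filter(dcpl, i, flags, cd_nelmts, cd_values, 120,
                                          filter_name, filter_config);
                System.out.println("filter " + i + ": id=" + id + " name=" + filter_name[0]);
            }
        }

        public static void main(String[] args) throws Exception {
            long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            H5.H5Pset_chunk(dcpl, 2, new long[] {4, 8});
            H5.H5Pset_deflate(dcpl, 9);
            probe(dcpl); // one line for the deflate filter
            H5.H5Pclose(dcpl);
        }
    }
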
diff --git a/java/examples/datasets/H5Ex_D_Szip.java b/java/examples/datasets/H5Ex_D_Szip.java
index 3fdc712..0426a87 100644
--- a/java/examples/datasets/H5Ex_D_Szip.java
+++ b/java/examples/datasets/H5Ex_D_Szip.java
@@ -29,45 +29,46 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_Szip {
- private static String FILENAME = "H5Ex_D_Szip.h5";
+ private static String FILENAME = "H5Ex_D_Szip.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM_X = 32;
- private static final int DIM_Y = 64;
- private static final int CHUNK_X = 4;
- private static final int CHUNK_Y = 8;
- private static final int RANK = 2;
- private static final int NDIMS = 2;
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
// Values for the status of space allocation
enum H5Z_filter {
- H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE(
- HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32(
- HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT(
- HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED(
- HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
- static {
+ static
+ {
for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5Z_filter(int layout_type) {
- this.code = layout_type;
- }
+ H5Z_filter(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5Z_filter get(int code) {
- return lookup.get(code);
- }
+ public static H5Z_filter get(int code) { return lookup.get(code); }
}
- private static boolean checkSzipFilter() {
+ private static boolean checkSzipFilter()
+ {
try {
int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SZIP);
if (available == 0) {
@@ -81,8 +82,8 @@ public class H5Ex_D_Szip {
try {
int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SZIP);
- if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
- || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
System.out.println("szip filter not available for encoding and decoding.");
return false;
}
@@ -93,13 +94,14 @@ public class H5Ex_D_Szip {
return true;
}
- private static void writeSzip() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void writeSzip()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize data.
@@ -110,7 +112,7 @@ public class H5Ex_D_Szip {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -143,7 +145,7 @@ public class H5Ex_D_Szip {
try {
if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
- HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -152,8 +154,8 @@ public class H5Ex_D_Szip {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -194,10 +196,11 @@ public class H5Ex_D_Szip {
}
}
- private static void readSzip() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ private static void readSzip()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
int[][] dset_data = new int[DIM_X][DIM_Y];
// Open an existing file.
@@ -231,15 +234,15 @@ public class H5Ex_D_Szip {
try {
if (dcpl_id >= 0) {
// Java lib requires a valid filter_name object and cd_values
- int[] flags = { 0 };
- long[] cd_nelmts = { 1 };
- int[] cd_values = { 0 };
- String[] filter_name = { "" };
- int[] filter_config = { 0 };
- int filter_type = -1;
-
- filter_type = H5
- .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
System.out.print("Filter type is: ");
switch (H5Z_filter.get(filter_type)) {
case H5Z_FILTER_DEFLATE:
@@ -273,8 +276,8 @@ public class H5Ex_D_Szip {
// Read the data using the default properties.
try {
if (dataset_id >= 0) {
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
}
catch (Exception e) {
@@ -319,7 +322,8 @@ public class H5Ex_D_Szip {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
// Check if gzip compression is available and can be used for both
// compression and decompression. Normally we do not perform error
// checking in these examples for the sake of clarity, but in this
@@ -330,5 +334,4 @@ public class H5Ex_D_Szip {
H5Ex_D_Szip.readSzip();
}
}
-
}
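
Apart from layout, what distinguishes H5Ex_D_Szip is a single dcpl call plus a stricter availability story: szip builds frequently ship decode-only, which is why checkSzipFilter tests both capability bits instead of stopping at H5Zfilter_avail. A sketch of the encode-side configuration (the NN option mask and 8 pixels per block follow the C counterpart and are assumptions here):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class SzipSketch {
        public static void main(String[] args) throws Exception {
            long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            // Nearest-neighbor coding; pixels-per-block must be even and <= 32.
            H5.H5Pset_szip(dcpl, HDF5Constants.H5_SZIP_NN_OPTION_MASK, 8);
            H5.H5Pset_chunk(dcpl, 2, new long[] {4, 8});
            H5.H5Pclose(dcpl);
        }
    }
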
diff --git a/java/examples/datasets/H5Ex_D_Transform.java b/java/examples/datasets/H5Ex_D_Transform.java
index 069e80b..16ab423 100644
--- a/java/examples/datasets/H5Ex_D_Transform.java
+++ b/java/examples/datasets/H5Ex_D_Transform.java
@@ -27,20 +27,21 @@ import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_Transform {
- private static String FILE = "H5Ex_D_Transform.h5";
- private static String DATASET = "DS1";
- private static final int DIM_X = 4;
- private static final int DIM_Y = 7;
- private static String TRANSFORM = "x+1";
+ private static String FILE = "H5Ex_D_Transform.h5";
+ private static String DATASET = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static String TRANSFORM = "x+1";
private static String RTRANSFORM = "x-1";
- private static void writeData() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void writeData()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dxpl_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dxpl_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
+ long[] dims = {DIM_X, DIM_Y};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize data.
@@ -60,7 +61,7 @@ public class H5Ex_D_Transform {
// Create a new file using the default properties.
try {
file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -90,7 +91,8 @@ public class H5Ex_D_Transform {
try {
if ((file_id >= 0) && (filespace_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASET, HDF5Constants.H5T_NATIVE_INT, filespace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -99,8 +101,8 @@ public class H5Ex_D_Transform {
// Write the data to the dataset using the dataset transfer property list.
try {
if ((dataset_id >= 0) && (dxpl_id >= 0))
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- dxpl_id, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, dxpl_id, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -141,11 +143,12 @@ public class H5Ex_D_Transform {
}
}
- private static void readData() {
+ private static void readData()
+ {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dxpl_id = HDF5Constants.H5I_INVALID_HID;
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dxpl_id = HDF5Constants.H5I_INVALID_HID;
int[][] dset_data = new int[DIM_X][DIM_Y];
// Open an existing file using the default properties.
@@ -168,8 +171,8 @@ public class H5Ex_D_Transform {
// Read the data using the default properties.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -197,8 +200,8 @@ public class H5Ex_D_Transform {
// Read the data using the dataset transfer property list.
try {
if ((dataset_id >= 0) && (dxpl_id >= 0))
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- dxpl_id, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, dxpl_id, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -206,8 +209,8 @@ public class H5Ex_D_Transform {
// Output the data to the screen.
- System.out.println("Data as written with transform '" + TRANSFORM + "' and read with transform '"
- + RTRANSFORM + "'");
+ System.out.println("Data as written with transform '" + TRANSFORM + "' and read with transform '" +
+ RTRANSFORM + "'");
for (int i = 0; i < DIM_X; i++) {
System.out.print(" [");
for (int j = 0; j < DIM_Y; j++)
@@ -239,9 +242,9 @@ public class H5Ex_D_Transform {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_D_Transform.writeData();
H5Ex_D_Transform.readData();
}
-
}
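
The detail worth keeping in view through this reflow: the transform expression is a property of the data transfer, not of the dataset, so TRANSFORM ("x+1") on write and RTRANSFORM ("x-1") on read undo each other while the file holds the transformed values. A sketch of the write side, assuming the Java binding's H5Pset_data_transform wrapper (error handling omitted):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class TransformSketch {
        public static void main(String[] args) throws Exception {
            long dxpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
            H5.H5Pset_data_transform(dxpl, "x+1"); // applied element-wise in transit
            // ... pass dxpl, rather than H5P_DEFAULT, as the transfer property
            // list argument of H5.H5Dwrite, exactly as the example above does ...
            H5.H5Pclose(dxpl);
        }
    }
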
diff --git a/java/examples/datasets/H5Ex_D_UnlimitedAdd.java b/java/examples/datasets/H5Ex_D_UnlimitedAdd.java
index c82b2d6..4154cf3 100644
--- a/java/examples/datasets/H5Ex_D_UnlimitedAdd.java
+++ b/java/examples/datasets/H5Ex_D_UnlimitedAdd.java
@@ -26,25 +26,26 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_UnlimitedAdd {
- private static String FILENAME = "H5Ex_D_UnlimitedAdd.h5";
+ private static String FILENAME = "H5Ex_D_UnlimitedAdd.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM_X = 4;
- private static final int DIM_Y = 7;
- private static final int EDIM_X = 6;
- private static final int EDIM_Y = 10;
- private static final int CHUNK_X = 4;
- private static final int CHUNK_Y = 4;
- private static final int RANK = 2;
- private static final int NDIMS = 2;
-
- private static void writeUnlimited() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ private static void writeUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] chunk_dims = { CHUNK_X, CHUNK_Y };
- long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ long[] maxdims = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize the dataset.
@@ -55,7 +56,7 @@ public class H5Ex_D_UnlimitedAdd {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -90,7 +91,7 @@ public class H5Ex_D_UnlimitedAdd {
try {
if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -99,8 +100,8 @@ public class H5Ex_D_UnlimitedAdd {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -141,14 +142,15 @@ public class H5Ex_D_UnlimitedAdd {
}
}
- private static void extendUnlimited() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void extendUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] extdims = { EDIM_X, EDIM_Y };
- long[] start = { 0, 0 };
- long[] count = new long[2];
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] extdims = {EDIM_X, EDIM_Y};
+ long[] start = {0, 0};
+ long[] count = new long[2];
int[][] dset_data;
int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
@@ -189,13 +191,13 @@ public class H5Ex_D_UnlimitedAdd {
}
// Allocate array of pointers to rows.
- dset_data = new int[(int) dims[0]][(int) dims[1]];
+ dset_data = new int[(int)dims[0]][(int)dims[1]];
// Read the data using the default properties.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -257,7 +259,7 @@ public class H5Ex_D_UnlimitedAdd {
// Write the data to the selected portion of the dataset.
if (dataset_id >= 0)
H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
- HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
}
}
catch (Exception e) {
@@ -291,11 +293,12 @@ public class H5Ex_D_UnlimitedAdd {
}
}
- private static void readUnlimited() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void readUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
int[][] dset_data;
// Open an existing file.
@@ -332,13 +335,13 @@ public class H5Ex_D_UnlimitedAdd {
e.printStackTrace();
}
// Allocate array of pointers to rows.
- dset_data = new int[(int) dims[0]][(int) dims[1]];
+ dset_data = new int[(int)dims[0]][(int)dims[1]];
// Read the data using the default properties.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -381,10 +384,10 @@ public class H5Ex_D_UnlimitedAdd {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_D_UnlimitedAdd.writeUnlimited();
H5Ex_D_UnlimitedAdd.extendUnlimited();
H5Ex_D_UnlimitedAdd.readUnlimited();
}
-
}
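
Condensing what writeUnlimited and extendUnlimited demonstrate: extendibility is fixed at creation time by passing H5S_UNLIMITED maxdims (which in turn forces a chunked layout), and growth afterwards is a single H5Dset_extent call. A minimal sketch (dimensions and names illustrative):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class UnlimitedSketch {
        public static void main(String[] args) throws Exception {
            long[] maxdims = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
            long file  = H5.H5Fcreate("unlim_sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                                      HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            long space = H5.H5Screate_simple(2, new long[] {4, 7}, maxdims);
            long dcpl  = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            H5.H5Pset_chunk(dcpl, 2, new long[] {4, 4}); // required for unlimited dims
            long dset  = H5.H5Dcreate(file, "DS1", HDF5Constants.H5T_STD_I32LE, space,
                                      HDF5Constants.H5P_DEFAULT, dcpl, HDF5Constants.H5P_DEFAULT);
            H5.H5Dset_extent(dset, new long[] {6, 10}); // grow; new cells read as fill value
            H5.H5Dclose(dset); H5.H5Pclose(dcpl); H5.H5Sclose(space); H5.H5Fclose(file);
        }
    }
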
diff --git a/java/examples/datasets/H5Ex_D_UnlimitedGzip.java b/java/examples/datasets/H5Ex_D_UnlimitedGzip.java
index 675b1ba..e084641 100644
--- a/java/examples/datasets/H5Ex_D_UnlimitedGzip.java
+++ b/java/examples/datasets/H5Ex_D_UnlimitedGzip.java
@@ -31,47 +31,48 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_UnlimitedGzip {
- private static String FILENAME = "H5Ex_D_UnlimitedGzip.h5";
+ private static String FILENAME = "H5Ex_D_UnlimitedGzip.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM_X = 4;
- private static final int DIM_Y = 7;
- private static final int EDIM_X = 6;
- private static final int EDIM_Y = 10;
- private static final int CHUNK_X = 4;
- private static final int CHUNK_Y = 4;
- private static final int RANK = 2;
- private static final int NDIMS = 2;
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
// Values for the status of space allocation
enum H5Z_filter {
- H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE(
- HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32(
- HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT(
- HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED(
- HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
- static {
+ static
+ {
for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5Z_filter(int layout_type) {
- this.code = layout_type;
- }
+ H5Z_filter(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5Z_filter get(int code) {
- return lookup.get(code);
- }
+ public static H5Z_filter get(int code) { return lookup.get(code); }
}
- private static boolean checkGzipFilter() {
+ private static boolean checkGzipFilter()
+ {
try {
int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
if (available == 0) {
@@ -85,8 +86,8 @@ public class H5Ex_D_UnlimitedGzip {
try {
int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
- if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
- || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
System.out.println("gzip filter not available for encoding and decoding.");
return false;
}
@@ -97,14 +98,15 @@ public class H5Ex_D_UnlimitedGzip {
return true;
}
- private static void writeUnlimited() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ private static void writeUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] chunk_dims = { CHUNK_X, CHUNK_Y };
- long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ long[] maxdims = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize the dataset.
@@ -115,7 +117,7 @@ public class H5Ex_D_UnlimitedGzip {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -147,7 +149,7 @@ public class H5Ex_D_UnlimitedGzip {
try {
if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -156,8 +158,8 @@ public class H5Ex_D_UnlimitedGzip {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -198,14 +200,15 @@ public class H5Ex_D_UnlimitedGzip {
}
}
- private static void extendUnlimited() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void extendUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] extdims = { EDIM_X, EDIM_Y };
- long[] start = { 0, 0 };
- long[] count = new long[2];
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] extdims = {EDIM_X, EDIM_Y};
+ long[] start = {0, 0};
+ long[] count = new long[2];
int[][] dset_data;
int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
@@ -246,13 +249,13 @@ public class H5Ex_D_UnlimitedGzip {
}
// Allocate array of pointers to rows.
- dset_data = new int[(int) dims[0]][(int) dims[1]];
+ dset_data = new int[(int)dims[0]][(int)dims[1]];
// Read the data using the default properties.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -314,7 +317,7 @@ public class H5Ex_D_UnlimitedGzip {
// Write the data to the selected portion of the dataset.
if (dataset_id >= 0)
H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
- HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
}
}
catch (Exception e) {
@@ -348,12 +351,13 @@ public class H5Ex_D_UnlimitedGzip {
}
}
- private static void readUnlimited() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void readUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
int[][] dset_data;
// Open an existing file.
@@ -387,14 +391,14 @@ public class H5Ex_D_UnlimitedGzip {
try {
if (dcpl_id >= 0) {
// Java lib requires a valid filter_name object and cd_values
- int[] flags = { 0 };
- long[] cd_nelmts = { 1 };
- int[] cd_values = { 0 };
- String[] filter_name = { "" };
- int[] filter_config = { 0 };
- int filter_type = -1;
- filter_type = H5
- .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
System.out.print("Filter type is: ");
switch (H5Z_filter.get(filter_type)) {
case H5Z_FILTER_DEFLATE:
@@ -436,13 +440,13 @@ public class H5Ex_D_UnlimitedGzip {
e.printStackTrace();
}
// Allocate array of pointers to rows.
- dset_data = new int[(int) dims[0]][(int) dims[1]];
+ dset_data = new int[(int)dims[0]][(int)dims[1]];
// Read the data using the default properties.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -485,7 +489,8 @@ public class H5Ex_D_UnlimitedGzip {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
// Check if gzip compression is available and can be used for both
// compression and decompression. Normally we do not perform error
// checking in these examples for the sake of clarity, but in this
@@ -497,5 +502,4 @@ public class H5Ex_D_UnlimitedGzip {
H5Ex_D_UnlimitedGzip.readUnlimited();
}
}
-
}
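
One guard pattern recurs across every filter example in this patch, here as checkGzipFilter: a filter being present is not enough, because a library build may carry only the decoder, so the examples also test the encode and decode capability bits. That check, extracted into a standalone sketch:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class GzipCheckSketch {
        public static void main(String[] args) throws Exception {
            if (H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE) == 0) {
                System.out.println("gzip filter not available.");
                return;
            }
            int info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
            boolean enc = (info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) != 0;
            boolean dec = (info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) != 0;
            System.out.println("gzip encode=" + enc + ", decode=" + dec);
        }
    }
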
diff --git a/java/examples/datasets/H5Ex_D_UnlimitedMod.java b/java/examples/datasets/H5Ex_D_UnlimitedMod.java
index 0708898..ccabcdd 100644
--- a/java/examples/datasets/H5Ex_D_UnlimitedMod.java
+++ b/java/examples/datasets/H5Ex_D_UnlimitedMod.java
@@ -26,25 +26,26 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_D_UnlimitedMod {
- private static String FILENAME = "H5Ex_D_UnlimitedMod.h5";
+ private static String FILENAME = "H5Ex_D_UnlimitedMod.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM_X = 4;
- private static final int DIM_Y = 7;
- private static final int EDIM_X = 6;
- private static final int EDIM_Y = 10;
- private static final int CHUNK_X = 4;
- private static final int CHUNK_Y = 4;
- private static final int RANK = 2;
- private static final int NDIMS = 2;
-
- private static void writeUnlimited() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ private static void writeUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] chunk_dims = { CHUNK_X, CHUNK_Y };
- long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ long[] maxdims = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize the dataset.
@@ -55,7 +56,7 @@ public class H5Ex_D_UnlimitedMod {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -90,7 +91,7 @@ public class H5Ex_D_UnlimitedMod {
try {
if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -99,8 +100,8 @@ public class H5Ex_D_UnlimitedMod {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -141,12 +142,13 @@ public class H5Ex_D_UnlimitedMod {
}
}
- private static void extendUnlimited() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void extendUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
- long[] extdims = { EDIM_X, EDIM_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] extdims = {EDIM_X, EDIM_Y};
int[][] dset_data;
int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
@@ -187,13 +189,13 @@ public class H5Ex_D_UnlimitedMod {
}
// Allocate array of pointers to rows.
- dset_data = new int[(int) dims[0]][(int) dims[1]];
+ dset_data = new int[(int)dims[0]][(int)dims[1]];
// Read the data using the default properties.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -244,7 +246,7 @@ public class H5Ex_D_UnlimitedMod {
try {
if ((dataspace_id >= 0) && (dataset_id >= 0))
H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
- HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -277,11 +279,12 @@ public class H5Ex_D_UnlimitedMod {
}
}
- private static void readUnlimited() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void readUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
int[][] dset_data;
// Open an existing file.
@@ -318,13 +321,13 @@ public class H5Ex_D_UnlimitedMod {
e.printStackTrace();
}
// Allocate array of pointers to rows.
- dset_data = new int[(int) dims[0]][(int) dims[1]];
+ dset_data = new int[(int)dims[0]][(int)dims[1]];
// Read the data using the default properties.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -367,10 +370,10 @@ public class H5Ex_D_UnlimitedMod {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_D_UnlimitedMod.writeUnlimited();
H5Ex_D_UnlimitedMod.extendUnlimited();
H5Ex_D_UnlimitedMod.readUnlimited();
}
-
}
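[note] extendUnlimited above grows a chunked dataset whose dataspace was created with H5S_UNLIMITED maxdims; the growth itself goes through H5Dset_extent. A minimal sketch, assuming dataset_id is open and was created that way:

    long[] extdims = {EDIM_X, EDIM_Y};   // new extent, per the constants above
    H5.H5Dset_extent(dataset_id, extdims);
    // Re-fetch the dataspace to observe the new dimensions.
    long space_id = H5.H5Dget_space(dataset_id);
    long[] dims    = new long[2];
    long[] maxdims = new long[2];
    H5.H5Sget_simple_extent_dims(space_id, dims, maxdims);
    H5.H5Sclose(space_id);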
diff --git a/java/examples/datatypes/CMakeLists.txt b/java/examples/datatypes/CMakeLists.txt
index 6997b2e..4512221 100644
--- a/java/examples/datatypes/CMakeLists.txt
+++ b/java/examples/datatypes/CMakeLists.txt
@@ -47,6 +47,13 @@ foreach (example ${HDF_JAVA_EXAMPLES})
# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
+
+ #-----------------------------------------------------------------------------
+ # Add Target to clang-format
+ #-----------------------------------------------------------------------------
+ if (HDF5_ENABLE_FORMATTERS)
+ clang_format (HDF5_JAVA_${example}_SRC_FORMAT ${example}.java)
+ endif ()
endforeach ()
set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
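[note] The clang_format block added here only takes effect when HDF5 is configured with the HDF5_ENABLE_FORMATTERS option shown in the guard (for example, passing -DHDF5_ENABLE_FORMATTERS=ON to cmake); otherwise no formatter targets are generated and ordinary builds are unaffected.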
diff --git a/java/examples/datatypes/H5Ex_T_Array.java b/java/examples/datatypes/H5Ex_T_Array.java
index 3939b38..4e9de2a 100644
--- a/java/examples/datatypes/H5Ex_T_Array.java
+++ b/java/examples/datatypes/H5Ex_T_Array.java
@@ -24,22 +24,23 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_Array {
- private static String FILENAME = "H5Ex_T_Array.h5";
+ private static String FILENAME = "H5Ex_T_Array.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM0 = 4;
- private static final int ADIM0 = 3;
- private static final int ADIM1 = 5;
- private static final int RANK = 1;
- private static final int NDIMS = 2;
-
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- long[] adims = { ADIM0, ADIM1 };
+ private static final int DIM0 = 4;
+ private static final int ADIM0 = 3;
+ private static final int ADIM1 = 5;
+ private static final int RANK = 1;
+ private static final int NDIMS = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ long[] adims = {ADIM0, ADIM1};
int[][][] dset_data = new int[DIM0][ADIM0][ADIM1];
// Initialize data. indx is the element in the dataspace, jndx and kndx the
@@ -52,7 +53,7 @@ public class H5Ex_T_Array {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -86,8 +87,9 @@ public class H5Ex_T_Array {
// Create the dataset.
try {
if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
- dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -97,7 +99,7 @@ public class H5Ex_T_Array {
try {
if ((dataset_id >= 0) && (memtype_id >= 0))
H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -147,16 +149,16 @@ public class H5Ex_T_Array {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- long[] adims = { ADIM0, ADIM1 };
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ long[] adims = {ADIM0, ADIM1};
int[][][] dset_data;
// Open an existing file.
@@ -196,7 +198,7 @@ public class H5Ex_T_Array {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset.
- dset_data = new int[(int) dims[0]][(int) (adims[0])][(int) (adims[1])];
+ dset_data = new int[(int)dims[0]][(int)(adims[0])][(int)(adims[1])];
// Create array datatypes for memory.
try {
@@ -210,7 +212,7 @@ public class H5Ex_T_Array {
try {
if ((dataset_id >= 0) && (memtype_id >= 0))
H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -264,10 +266,10 @@ public class H5Ex_T_Array {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_Array.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -275,5 +277,4 @@ public class H5Ex_T_Array {
// data using malloc().
H5Ex_T_Array.ReadDataset();
}
-
}
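[note] H5Ex_T_Array builds array datatypes with H5Tarray_create from a base type plus dimensions. A minimal sketch of the file/memory type pair, using the ADIM0/ADIM1/NDIMS constants above (the on-disk base type here is illustrative; the exact one is in the unshown hunks):

    long[] adims = {ADIM0, ADIM1};
    long filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, NDIMS, adims);
    long memtype_id  = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, NDIMS, adims);
    // ... create/write the dataset with filetype_id, read with memtype_id ...
    H5.H5Tclose(memtype_id);
    H5.H5Tclose(filetype_id);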
diff --git a/java/examples/datatypes/H5Ex_T_ArrayAttribute.java b/java/examples/datatypes/H5Ex_T_ArrayAttribute.java
index c4c4bc4..45b44c1 100644
--- a/java/examples/datatypes/H5Ex_T_ArrayAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_ArrayAttribute.java
@@ -24,24 +24,25 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_ArrayAttribute {
- private static String FILENAME = "H5Ex_T_ArrayAttribute.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_ArrayAttribute.h5";
+ private static String DATASETNAME = "DS1";
private static String ATTRIBUTENAME = "A1";
- private static final int DIM0 = 4;
- private static final int ADIM0 = 3;
- private static final int ADIM1 = 5;
- private static final int RANK = 1;
- private static final int NDIMS = 2;
-
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- long[] adims = { ADIM0, ADIM1 };
+ private static final int DIM0 = 4;
+ private static final int ADIM0 = 3;
+ private static final int ADIM1 = 5;
+ private static final int RANK = 1;
+ private static final int NDIMS = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ long[] adims = {ADIM0, ADIM1};
int[][][] dset_data = new int[DIM0][ADIM0][ADIM1];
// Initialize data. indx is the element in the dataspace, jndx and kndx the
@@ -54,7 +55,7 @@ public class H5Ex_T_ArrayAttribute {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -81,7 +82,8 @@ public class H5Ex_T_ArrayAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -103,7 +105,7 @@ public class H5Ex_T_ArrayAttribute {
try {
if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -170,17 +172,17 @@ public class H5Ex_T_ArrayAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- long[] adims = { ADIM0, ADIM1 };
+ long[] dims = {DIM0};
+ long[] adims = {ADIM0, ADIM1};
int[][][] dset_data;
// Open an existing file.
@@ -203,7 +205,7 @@ public class H5Ex_T_ArrayAttribute {
try {
if (dataset_id >= 0)
attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -229,7 +231,7 @@ public class H5Ex_T_ArrayAttribute {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset.
- dset_data = new int[(int) dims[0]][(int) (adims[0])][(int) (adims[1])];
+ dset_data = new int[(int)dims[0]][(int)(adims[0])][(int)(adims[1])];
// Create array datatypes for memory.
try {
@@ -304,10 +306,10 @@ public class H5Ex_T_ArrayAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_ArrayAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -315,5 +317,4 @@ public class H5Ex_T_ArrayAttribute {
// data using malloc().
H5Ex_T_ArrayAttribute.ReadDataset();
}
-
}
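[note] H5Ex_T_ArrayAttribute pairs the same array datatypes with the attribute API. A minimal sketch of creating and writing such an attribute, assuming dataset_id, filetype_id, memtype_id, and dataspace_id are open as in the example:

    long attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
                                     HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
    H5.H5Awrite(attribute_id, memtype_id, dset_data);  // int[DIM0][ADIM0][ADIM1]
    H5.H5Aclose(attribute_id);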
diff --git a/java/examples/datatypes/H5Ex_T_Bit.java b/java/examples/datatypes/H5Ex_T_Bit.java
index 45d4e8a..6d1a253 100644
--- a/java/examples/datatypes/H5Ex_T_Bit.java
+++ b/java/examples/datatypes/H5Ex_T_Bit.java
@@ -24,17 +24,18 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_Bit {
- private static String FILENAME = "H5Ex_T_Bit.h5";
+ private static String FILENAME = "H5Ex_T_Bit.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM0 = 4;
- private static final int DIM1 = 7;
- private static final int RANK = 2;
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
int[][] dset_data = new int[DIM0][DIM1];
// Initialize data.
@@ -42,15 +43,15 @@ public class H5Ex_T_Bit {
for (int jndx = 0; jndx < DIM1; jndx++) {
dset_data[indx][jndx] = 0;
dset_data[indx][jndx] |= (indx * jndx - jndx) & 0x03; /* Field "A" */
- dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */
- dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */
+ dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */
+ dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */
dset_data[indx][jndx] |= ((indx + jndx) & 0x03) << 6; /* Field "D" */
}
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -69,7 +70,8 @@ public class H5Ex_T_Bit {
try {
if ((file_id >= 0) && (dataspace_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_B8BE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -78,8 +80,8 @@ public class H5Ex_T_Bit {
// Write the bitfield data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -111,14 +113,14 @@ public class H5Ex_T_Bit {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
int[][] dset_data;
// Open an existing file.
@@ -157,13 +159,13 @@ public class H5Ex_T_Bit {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset.
- dset_data = new int[(int) dims[0]][(int) (dims[1])];
+ dset_data = new int[(int)dims[0]][(int)(dims[1])];
// Read data.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -209,10 +211,10 @@ public class H5Ex_T_Bit {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_Bit.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -220,5 +222,4 @@ public class H5Ex_T_Bit {
// data using malloc().
H5Ex_T_Bit.ReadDataset();
}
-
}
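[note] The initialization loop in H5Ex_T_Bit packs four 2-bit fields into each byte-sized element; the clang-format change only realigns the trailing comments. The packing/unpacking arithmetic, as a standalone sketch with hypothetical field values:

    int a = 2, b = 1, c = 3, d = 0;        // each field holds 0..3
    int packed = ((a & 0x03))              /* Field "A", bits 0-1 */
               | ((b & 0x03) << 2)         /* Field "B", bits 2-3 */
               | ((c & 0x03) << 4)         /* Field "C", bits 4-5 */
               | ((d & 0x03) << 6);        /* Field "D", bits 6-7 */
    int fieldC = (packed >> 4) & 0x03;     // recovers c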
diff --git a/java/examples/datatypes/H5Ex_T_BitAttribute.java b/java/examples/datatypes/H5Ex_T_BitAttribute.java
index 9b33ca5..3ad643a 100644
--- a/java/examples/datatypes/H5Ex_T_BitAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_BitAttribute.java
@@ -24,19 +24,20 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_BitAttribute {
- private static String FILENAME = "H5Ex_T_BitAttribute.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_BitAttribute.h5";
+ private static String DATASETNAME = "DS1";
private static String ATTRIBUTENAME = "A1";
- private static final int DIM0 = 4;
- private static final int DIM1 = 7;
- private static final int RANK = 2;
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long[] dims = {DIM0, DIM1};
int[][] dset_data = new int[DIM0][DIM1];
// Initialize data.
@@ -44,15 +45,15 @@ public class H5Ex_T_BitAttribute {
for (int jndx = 0; jndx < DIM1; jndx++) {
dset_data[indx][jndx] = 0;
dset_data[indx][jndx] |= (indx * jndx - jndx) & 0x03; /* Field "A" */
- dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */
- dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */
+ dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */
+ dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */
dset_data[indx][jndx] |= ((indx + jndx) & 0x03) << 6; /* Field "D" */
}
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -63,7 +64,8 @@ public class H5Ex_T_BitAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -84,8 +86,9 @@ public class H5Ex_T_BitAttribute {
// Create the attribute and write the array data to it.
try {
if ((dataset_id >= 0) && (dataspace_id >= 0))
- attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_B8BE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ attribute_id =
+ H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_B8BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -134,15 +137,15 @@ public class H5Ex_T_BitAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long[] dims = {DIM0, DIM1};
int[][] dset_data;
// Open an existing file.
@@ -165,7 +168,7 @@ public class H5Ex_T_BitAttribute {
try {
if (dataset_id >= 0)
attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -190,7 +193,7 @@ public class H5Ex_T_BitAttribute {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset.
- dset_data = new int[(int) dims[0]][(int) (dims[1])];
+ dset_data = new int[(int)dims[0]][(int)(dims[1])];
// Read data.
try {
@@ -249,10 +252,10 @@ public class H5Ex_T_BitAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_BitAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -260,5 +263,4 @@ public class H5Ex_T_BitAttribute {
// data using malloc().
H5Ex_T_BitAttribute.ReadDataset();
}
-
}
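[note] The read path in H5Ex_T_BitAttribute goes through H5Aopen_by_name and H5Aread with the native 8-bit bitfield type. A minimal sketch, assuming dataset_id is open and the attribute exists:

    long attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME,
                                           HDF5Constants.H5P_DEFAULT,
                                           HDF5Constants.H5P_DEFAULT);
    int[][] dset_data = new int[DIM0][DIM1];
    H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_B8, dset_data);
    H5.H5Aclose(attribute_id);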
diff --git a/java/examples/datatypes/H5Ex_T_Commit.java b/java/examples/datatypes/H5Ex_T_Commit.java
index 62db5ea..44586ef 100644
--- a/java/examples/datatypes/H5Ex_T_Commit.java
+++ b/java/examples/datatypes/H5Ex_T_Commit.java
@@ -21,77 +21,74 @@
package examples.datatypes;
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
public class H5Ex_T_Commit {
- private static String FILENAME = "H5Ex_T_Commit.h5";
- private static String DATATYPENAME = "Sensor_Type";
- protected static final int INTEGERSIZE = 4;
- protected static final int DOUBLESIZE = 8;
+ private static String FILENAME = "H5Ex_T_Commit.h5";
+ private static String DATATYPENAME = "Sensor_Type";
+ protected static final int INTEGERSIZE = 4;
+ protected static final int DOUBLESIZE = 8;
protected final static int MAXSTRINGSIZE = 80;
// Values for the various classes of datatypes
enum H5T_class {
- H5T_NO_CLASS(HDF5Constants.H5T_NO_CLASS), // error
- H5T_INTEGER(HDF5Constants.H5T_INTEGER), // integer types
- H5T_FLOAT(HDF5Constants.H5T_FLOAT), // floating-point types
- H5T_TIME(HDF5Constants.H5T_TIME), // date and time types
- H5T_STRING(HDF5Constants.H5T_STRING), // character string types
- H5T_BITFIELD(HDF5Constants.H5T_BITFIELD), // bit field types
- H5T_OPAQUE(HDF5Constants.H5T_OPAQUE), // opaque types
- H5T_COMPOUND(HDF5Constants.H5T_COMPOUND), // compound types
+ H5T_NO_CLASS(HDF5Constants.H5T_NO_CLASS), // error
+ H5T_INTEGER(HDF5Constants.H5T_INTEGER), // integer types
+ H5T_FLOAT(HDF5Constants.H5T_FLOAT), // floating-point types
+ H5T_TIME(HDF5Constants.H5T_TIME), // date and time types
+ H5T_STRING(HDF5Constants.H5T_STRING), // character string types
+ H5T_BITFIELD(HDF5Constants.H5T_BITFIELD), // bit field types
+ H5T_OPAQUE(HDF5Constants.H5T_OPAQUE), // opaque types
+ H5T_COMPOUND(HDF5Constants.H5T_COMPOUND), // compound types
H5T_REFERENCE(HDF5Constants.H5T_REFERENCE), // reference types
- H5T_ENUM(HDF5Constants.H5T_ENUM), // enumeration types
- H5T_VLEN(HDF5Constants.H5T_VLEN), // Variable-Length types
- H5T_ARRAY(HDF5Constants.H5T_ARRAY), // Array types
- H5T_NCLASSES(11); // this must be last
+ H5T_ENUM(HDF5Constants.H5T_ENUM), // enumeration types
+ H5T_VLEN(HDF5Constants.H5T_VLEN), // Variable-Length types
+ H5T_ARRAY(HDF5Constants.H5T_ARRAY), // Array types
+ H5T_NCLASSES(11); // this must be last
private static final Map<Long, H5T_class> lookup = new HashMap<Long, H5T_class>();
- static {
+ static
+ {
for (H5T_class s : EnumSet.allOf(H5T_class.class))
lookup.put(s.getCode(), s);
}
private long code;
- H5T_class(long layout_type) {
- this.code = layout_type;
- }
+ H5T_class(long layout_type) { this.code = layout_type; }
- public long getCode() {
- return this.code;
- }
+ public long getCode() { return this.code; }
- public static H5T_class get(long typeclass_id) {
- return lookup.get(typeclass_id);
- }
+ public static H5T_class get(long typeclass_id) { return lookup.get(typeclass_id); }
}
// The supporting Sensor_Datatype class.
private static class Sensor_Datatype {
static int numberMembers = 4;
- static int[] memberDims = { 1, 1, 1, 1 };
+ static int[] memberDims = {1, 1, 1, 1};
- String[] memberNames = { "Serial number", "Location", "Temperature (F)", "Pressure (inHg)" };
- long[] memberFileTypes = { HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1, HDF5Constants.H5T_IEEE_F64BE,
- HDF5Constants.H5T_IEEE_F64BE };
- static int[] memberStorage = { INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE };
+ String[] memberNames = {"Serial number", "Location", "Temperature (F)", "Pressure (inHg)"};
+ long[] memberFileTypes = {HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE};
+ static int[] memberStorage = {INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE};
// Data size is the storage size for the members not the object.
- static long getDataSize() {
+ static long getDataSize()
+ {
long data_size = 0;
for (int indx = 0; indx < numberMembers; indx++)
data_size += memberStorage[indx] * memberDims[indx];
return data_size;
}
- static int getOffset(int memberItem) {
+ static int getOffset(int memberItem)
+ {
int data_offset = 0;
for (int indx = 0; indx < memberItem; indx++)
data_offset += memberStorage[indx];
@@ -99,15 +96,16 @@ public class H5Ex_T_Commit {
}
}
- private static void CreateDataType() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long strtype_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataType()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
Sensor_Datatype datatypes = new Sensor_Datatype();
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -134,7 +132,8 @@ public class H5Ex_T_Commit {
long type_id = datatypes.memberFileTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
- H5.H5Tinsert(filetype_id, datatypes.memberNames[indx], Sensor_Datatype.getOffset(indx), type_id);
+ H5.H5Tinsert(filetype_id, datatypes.memberNames[indx], Sensor_Datatype.getOffset(indx),
+ type_id);
}
}
}
@@ -145,8 +144,8 @@ public class H5Ex_T_Commit {
// Commit the compound datatype to the file, creating a named datatype.
try {
if ((file_id >= 0) && (filetype_id >= 0))
- H5.H5Tcommit(file_id, DATATYPENAME, filetype_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ H5.H5Tcommit(file_id, DATATYPENAME, filetype_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -178,13 +177,13 @@ public class H5Ex_T_Commit {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataType() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataType()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long typeclass_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
// Open an existing file.
try {
@@ -247,10 +246,10 @@ public class H5Ex_T_Commit {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_Commit.CreateDataType();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -258,5 +257,4 @@ public class H5Ex_T_Commit {
// data using malloc().
H5Ex_T_Commit.ReadDataType();
}
-
}
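[note] H5Ex_T_Commit's core operation is H5Tcommit, which turns a transient compound type into a named datatype stored in the file. A minimal sketch of committing and later reopening it, assuming file_id and filetype_id are open as above:

    H5.H5Tcommit(file_id, DATATYPENAME, filetype_id, HDF5Constants.H5P_DEFAULT,
                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
    // Later (possibly in another program): reopen the named type by name.
    long named_id  = H5.H5Topen(file_id, DATATYPENAME, HDF5Constants.H5P_DEFAULT);
    long typeclass = H5.H5Tget_class(named_id);  // H5T_COMPOUND for this example
    H5.H5Tclose(named_id);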
diff --git a/java/examples/datatypes/H5Ex_T_Compound.java b/java/examples/datatypes/H5Ex_T_Compound.java
index 8c83ebb..a78d0e3 100644
--- a/java/examples/datatypes/H5Ex_T_Compound.java
+++ b/java/examples/datatypes/H5Ex_T_Compound.java
@@ -20,49 +20,52 @@
package examples.datatypes;
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
public class H5Ex_T_Compound {
- private static String FILENAME = "H5Ex_T_Compound.h5";
- private static String DATASETNAME = "DS1";
- private static final int DIM0 = 4;
- private static final int RANK = 1;
- protected static final int INTEGERSIZE = 4;
- protected static final int DOUBLESIZE = 8;
+ private static String FILENAME = "H5Ex_T_Compound.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int RANK = 1;
+ protected static final int INTEGERSIZE = 4;
+ protected static final int DOUBLESIZE = 8;
protected final static int MAXSTRINGSIZE = 80;
static class Sensor_Datatype {
static int numberMembers = 4;
- static int[] memberDims = { 1, 1, 1, 1 };
+ static int[] memberDims = {1, 1, 1, 1};
- static String[] memberNames = { "Serial number", "Location", "Temperature (F)", "Pressure (inHg)" };
- static long[] memberMemTypes = { HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
- HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE };
- static long[] memberFileTypes = { HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
- HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE };
- static int[] memberStorage = { INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE };
+ static String[] memberNames = {"Serial number", "Location", "Temperature (F)", "Pressure (inHg)"};
+ static long[] memberMemTypes = {HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE};
+ static long[] memberFileTypes = {HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE};
+ static int[] memberStorage = {INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE};
// Data size is the storage size for the members.
- static long getTotalDataSize() {
+ static long getTotalDataSize()
+ {
long data_size = 0;
for (int indx = 0; indx < numberMembers; indx++)
data_size += memberStorage[indx] * memberDims[indx];
return DIM0 * data_size;
}
- static long getDataSize() {
+ static long getDataSize()
+ {
long data_size = 0;
for (int indx = 0; indx < numberMembers; indx++)
data_size += memberStorage[indx] * memberDims[indx];
return data_size;
}
- static int getOffset(int memberItem) {
+ static int getOffset(int memberItem)
+ {
int data_offset = 0;
for (int indx = 0; indx < memberItem; indx++)
data_offset += memberStorage[indx];
@@ -76,60 +79,63 @@ public class H5Ex_T_Compound {
public double temperature;
public double pressure;
- Sensor(int serial_no, String location, double temperature, double pressure) {
- this.serial_no = serial_no;
- this.location = location;
+ Sensor(int serial_no, String location, double temperature, double pressure)
+ {
+ this.serial_no = serial_no;
+ this.location = location;
this.temperature = temperature;
- this.pressure = pressure;
+ this.pressure = pressure;
}
- Sensor(ByteBuffer databuf, int dbposition) {
- readBuffer(databuf, dbposition);
- }
+ Sensor(ByteBuffer databuf, int dbposition) { readBuffer(databuf, dbposition); }
- void writeBuffer(ByteBuffer databuf, int dbposition) {
+ void writeBuffer(ByteBuffer databuf, int dbposition)
+ {
databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), serial_no);
byte[] temp_str = location.getBytes(Charset.forName("UTF-8"));
- int arraylen = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
+ int arraylen = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
for (int ndx = 0; ndx < arraylen; ndx++)
databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, temp_str[ndx]);
for (int ndx = arraylen; ndx < MAXSTRINGSIZE; ndx++)
- databuf.put(dbposition + Sensor_Datatype.getOffset(1) + arraylen, (byte) 0);
+ databuf.put(dbposition + Sensor_Datatype.getOffset(1) + arraylen, (byte)0);
databuf.putDouble(dbposition + Sensor_Datatype.getOffset(2), temperature);
databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), pressure);
}
- void readBuffer(ByteBuffer databuf, int dbposition) {
- this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
+ void readBuffer(ByteBuffer databuf, int dbposition)
+ {
+ this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
ByteBuffer stringbuf = databuf.duplicate();
stringbuf.position(dbposition + Sensor_Datatype.getOffset(1));
stringbuf.limit(dbposition + Sensor_Datatype.getOffset(1) + MAXSTRINGSIZE);
byte[] bytearr = new byte[stringbuf.remaining()];
stringbuf.get(bytearr);
- this.location = new String(bytearr, Charset.forName("UTF-8")).trim();
+ this.location = new String(bytearr, Charset.forName("UTF-8")).trim();
this.temperature = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(2));
- this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
+ this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
}
@Override
- public String toString() {
- return String.format("Serial number : " + serial_no + "%n" +
- "Location : " + location + "%n" +
- "Temperature (F) : " + temperature + "%n" +
- "Pressure (inHg) : " + pressure + "%n");
+ public String toString()
+ {
+ return String.format("Serial number : " + serial_no + "%n"
+ + "Location : " + location + "%n"
+ + "Temperature (F) : " + temperature + "%n"
+ + "Pressure (inHg) : " + pressure + "%n");
}
}
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long strtype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
Sensor[] object_data = new Sensor[DIM0];
- byte[] dset_data = null;
+ byte[] dset_data = null;
// Initialize data.
object_data[0] = new Sensor(1153, new String("Exterior (static)"), 53.23, 24.57);
@@ -140,7 +146,7 @@ public class H5Ex_T_Compound {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -164,8 +170,8 @@ public class H5Ex_T_Compound {
long type_id = Sensor_Datatype.memberMemTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
- H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
- type_id);
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
}
}
}
@@ -184,8 +190,8 @@ public class H5Ex_T_Compound {
long type_id = Sensor_Datatype.memberFileTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
- H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
- type_id);
+ H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
}
}
}
@@ -205,8 +211,9 @@ public class H5Ex_T_Compound {
// Create the dataset.
try {
if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
- dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -214,16 +221,16 @@ public class H5Ex_T_Compound {
// Write the compound data to the dataset.
// allocate memory for read buffer.
- dset_data = new byte[(int)dims[0] * (int)Sensor_Datatype.getDataSize()];
+ dset_data = new byte[(int)dims[0] * (int)Sensor_Datatype.getDataSize()];
ByteBuffer outBuf = ByteBuffer.wrap(dset_data);
outBuf.order(ByteOrder.nativeOrder());
- for (int indx = 0; indx < (int) dims[0]; indx++) {
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
object_data[indx].writeBuffer(outBuf, indx * (int)Sensor_Datatype.getDataSize());
}
try {
if ((dataset_id >= 0) && (memtype_id >= 0))
H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -281,16 +288,16 @@ public class H5Ex_T_Compound {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long strtype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
Sensor[] object_data2;
byte[] dset_data;
@@ -346,8 +353,8 @@ public class H5Ex_T_Compound {
long type_id = Sensor_Datatype.memberMemTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
- H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
- type_id);
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
}
}
}
@@ -356,19 +363,19 @@ public class H5Ex_T_Compound {
}
// allocate memory for read buffer.
- dset_data = new byte[(int) dims[0] * (int)Sensor_Datatype.getDataSize()];
+ dset_data = new byte[(int)dims[0] * (int)Sensor_Datatype.getDataSize()];
- object_data2 = new Sensor[(int) dims[0]];
+ object_data2 = new Sensor[(int)dims[0]];
// Read data.
try {
if ((dataset_id >= 0) && (memtype_id >= 0))
H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
ByteBuffer inBuf = ByteBuffer.wrap(dset_data);
inBuf.order(ByteOrder.nativeOrder());
- for (int indx = 0; indx < (int) dims[0]; indx++) {
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
object_data2[indx] = new Sensor(inBuf, indx * (int)Sensor_Datatype.getDataSize());
}
}
@@ -425,10 +432,10 @@ public class H5Ex_T_Compound {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_Compound.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -436,5 +443,4 @@ public class H5Ex_T_Compound {
// data using malloc().
H5Ex_T_Compound.ReadDataset();
}
-
}
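[note] Both the create and read paths in H5Ex_T_Compound assemble the compound type member-by-member with H5Tinsert, substituting an 80-byte C string type for the Location field. A condensed sketch of the in-memory type, using the sizes and offsets implied by Sensor_Datatype above:

    long strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
    H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
    long memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND,
                                   Sensor_Datatype.getDataSize());
    H5.H5Tinsert(memtype_id, "Serial number", 0, HDF5Constants.H5T_NATIVE_INT);
    H5.H5Tinsert(memtype_id, "Location", INTEGERSIZE, strtype_id);
    H5.H5Tinsert(memtype_id, "Temperature (F)", INTEGERSIZE + MAXSTRINGSIZE,
                 HDF5Constants.H5T_NATIVE_DOUBLE);
    H5.H5Tinsert(memtype_id, "Pressure (inHg)", INTEGERSIZE + MAXSTRINGSIZE + DOUBLESIZE,
                 HDF5Constants.H5T_NATIVE_DOUBLE);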
diff --git a/java/examples/datatypes/H5Ex_T_CompoundAttribute.java b/java/examples/datatypes/H5Ex_T_CompoundAttribute.java
index 58d2fb7..f331a1e 100644
--- a/java/examples/datatypes/H5Ex_T_CompoundAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_CompoundAttribute.java
@@ -20,52 +20,55 @@
package examples.datatypes;
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
public class H5Ex_T_CompoundAttribute {
- private static String FILENAME = "H5Ex_T_CompoundAttribute.h5";
- private static String DATASETNAME = "DS1";
- private static String ATTRIBUTENAME = "A1";
- private static final int DIM0 = 4;
- private static final int RANK = 1;
- protected static final int INTEGERSIZE = 4;
- protected static final int DOUBLESIZE = 8;
+ private static String FILENAME = "H5Ex_T_CompoundAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int RANK = 1;
+ protected static final int INTEGERSIZE = 4;
+ protected static final int DOUBLESIZE = 8;
protected final static int MAXSTRINGSIZE = 80;
// Using Java Externalization will add a two-byte object header in
// the stream, which needs to be called out in the datatypes.
static class Sensor_Datatype {
static int numberMembers = 4;
- static int[] memberDims = { 1, 1, 1, 1 };
+ static int[] memberDims = {1, 1, 1, 1};
- static String[] memberNames = { "Serial number", "Location", "Temperature (F)", "Pressure (inHg)" };
- static long[] memberMemTypes = { HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
- HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE };
- static long[] memberFileTypes = { HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
- HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE };
- static int[] memberStorage = { INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE };
+ static String[] memberNames = {"Serial number", "Location", "Temperature (F)", "Pressure (inHg)"};
+ static long[] memberMemTypes = {HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE};
+ static long[] memberFileTypes = {HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE};
+ static int[] memberStorage = {INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE};
// Data size is the storage size for the members not the object.
- static long getTotalDataSize() {
+ static long getTotalDataSize()
+ {
long data_size = 0;
for (int indx = 0; indx < numberMembers; indx++)
data_size += memberStorage[indx] * memberDims[indx];
return DIM0 * data_size;
}
- static long getDataSize() {
+ static long getDataSize()
+ {
long data_size = 0;
for (int indx = 0; indx < numberMembers; indx++)
data_size += memberStorage[indx] * memberDims[indx];
return data_size;
}
- static int getOffset(int memberItem) {
+ static int getOffset(int memberItem)
+ {
int data_offset = 0;
for (int indx = 0; indx < memberItem; indx++)
data_offset += memberStorage[indx];
@@ -79,61 +82,64 @@ public class H5Ex_T_CompoundAttribute {
public double temperature;
public double pressure;
- Sensor(int serial_no, String location, double temperature, double pressure) {
- this.serial_no = serial_no;
- this.location = location;
+ Sensor(int serial_no, String location, double temperature, double pressure)
+ {
+ this.serial_no = serial_no;
+ this.location = location;
this.temperature = temperature;
- this.pressure = pressure;
+ this.pressure = pressure;
}
- Sensor(ByteBuffer databuf, int dbposition) {
- readBuffer(databuf, dbposition);
- }
+ Sensor(ByteBuffer databuf, int dbposition) { readBuffer(databuf, dbposition); }
- void writeBuffer(ByteBuffer databuf, int dbposition) {
+ void writeBuffer(ByteBuffer databuf, int dbposition)
+ {
databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), serial_no);
byte[] temp_str = location.getBytes(Charset.forName("UTF-8"));
- int arraylen = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
+ int arraylen = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
for (int ndx = 0; ndx < arraylen; ndx++)
databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, temp_str[ndx]);
for (int ndx = arraylen; ndx < MAXSTRINGSIZE; ndx++)
- databuf.put(dbposition + Sensor_Datatype.getOffset(1) + arraylen, (byte) 0);
+ databuf.put(dbposition + Sensor_Datatype.getOffset(1) + arraylen, (byte)0);
databuf.putDouble(dbposition + Sensor_Datatype.getOffset(2), temperature);
databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), pressure);
}
- void readBuffer(ByteBuffer databuf, int dbposition) {
- this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
+ void readBuffer(ByteBuffer databuf, int dbposition)
+ {
+ this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
ByteBuffer stringbuf = databuf.duplicate();
stringbuf.position(dbposition + Sensor_Datatype.getOffset(1));
stringbuf.limit(dbposition + Sensor_Datatype.getOffset(1) + MAXSTRINGSIZE);
byte[] bytearr = new byte[stringbuf.remaining()];
stringbuf.get(bytearr);
- this.location = new String(bytearr, Charset.forName("UTF-8")).trim();
+ this.location = new String(bytearr, Charset.forName("UTF-8")).trim();
this.temperature = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(2));
- this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
+ this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
}
@Override
- public String toString() {
- return String.format("Serial number : " + serial_no + "%n" +
- "Location : " + location + "%n" +
- "Temperature (F) : " + temperature + "%n" +
- "Pressure (inHg) : " + pressure + "%n");
+ public String toString()
+ {
+ return String.format("Serial number : " + serial_no + "%n"
+ + "Location : " + location + "%n"
+ + "Temperature (F) : " + temperature + "%n"
+ + "Pressure (inHg) : " + pressure + "%n");
}
}
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long strtype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
Sensor[] object_data = new Sensor[DIM0];
- byte[] dset_data = null;
+ byte[] dset_data = null;
// Initialize data.
object_data[0] = new Sensor(1153, new String("Exterior (static)"), 53.23, 24.57);
@@ -144,7 +150,7 @@ public class H5Ex_T_CompoundAttribute {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -168,8 +174,8 @@ public class H5Ex_T_CompoundAttribute {
long type_id = Sensor_Datatype.memberMemTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
- H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
- type_id);
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
}
}
}
@@ -188,8 +194,8 @@ public class H5Ex_T_CompoundAttribute {
long type_id = Sensor_Datatype.memberFileTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
- H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
- type_id);
+ H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
}
}
}
@@ -202,7 +208,8 @@ public class H5Ex_T_CompoundAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -224,17 +231,17 @@ public class H5Ex_T_CompoundAttribute {
try {
if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
// Write the compound data.
- dset_data = new byte[(int) dims[0] * (int)Sensor_Datatype.getDataSize()];
+ dset_data = new byte[(int)dims[0] * (int)Sensor_Datatype.getDataSize()];
ByteBuffer outBuf = ByteBuffer.wrap(dset_data);
outBuf.order(ByteOrder.nativeOrder());
- for (int indx = 0; indx < (int) dims[0]; indx++) {
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
object_data[indx].writeBuffer(outBuf, indx * (int)Sensor_Datatype.getDataSize());
}
try {
@@ -305,17 +312,17 @@ public class H5Ex_T_CompoundAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long strtype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ long[] dims = {DIM0};
Sensor[] object_data2;
byte[] dset_data;
@@ -339,7 +346,7 @@ public class H5Ex_T_CompoundAttribute {
try {
if (dataset_id >= 0)
attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -382,8 +389,8 @@ public class H5Ex_T_CompoundAttribute {
long type_id = Sensor_Datatype.memberMemTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
- H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
- type_id);
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
}
}
}
@@ -392,9 +399,9 @@ public class H5Ex_T_CompoundAttribute {
}
// allocate memory for read buffer.
- dset_data = new byte[(int) dims[0] * (int)Sensor_Datatype.getDataSize()];
+ dset_data = new byte[(int)dims[0] * (int)Sensor_Datatype.getDataSize()];
- object_data2 = new Sensor[(int) dims[0]];
+ object_data2 = new Sensor[(int)dims[0]];
// Read data.
try {
@@ -403,7 +410,7 @@ public class H5Ex_T_CompoundAttribute {
ByteBuffer inBuf = ByteBuffer.wrap(dset_data);
inBuf.order(ByteOrder.nativeOrder());
- for (int indx = 0; indx < (int) dims[0]; indx++) {
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
object_data2[indx] = new Sensor(inBuf, indx * (int)Sensor_Datatype.getDataSize());
}
}
@@ -468,10 +475,10 @@ public class H5Ex_T_CompoundAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_CompoundAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -479,5 +486,4 @@ public class H5Ex_T_CompoundAttribute {
// data using malloc().
H5Ex_T_CompoundAttribute.ReadDataset();
}
-
}
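[note] Because the Java wrapper moves compound data as raw bytes, H5Ex_T_CompoundAttribute serializes each Sensor through a native-order ByteBuffer before handing the byte array to H5Awrite. A condensed sketch of that write path, reusing the classes defined above:

    byte[] dset_data = new byte[DIM0 * (int)Sensor_Datatype.getDataSize()];
    ByteBuffer outBuf = ByteBuffer.wrap(dset_data).order(ByteOrder.nativeOrder());
    for (int indx = 0; indx < DIM0; indx++)
        object_data[indx].writeBuffer(outBuf, indx * (int)Sensor_Datatype.getDataSize());
    H5.H5Awrite(attribute_id, memtype_id, dset_data);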
diff --git a/java/examples/datatypes/H5Ex_T_Float.java b/java/examples/datatypes/H5Ex_T_Float.java
index e8da7f6..9ca099e 100644
--- a/java/examples/datatypes/H5Ex_T_Float.java
+++ b/java/examples/datatypes/H5Ex_T_Float.java
@@ -28,17 +28,18 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_Float {
- private static String FILENAME = "H5Ex_T_Float.h5";
+ private static String FILENAME = "H5Ex_T_Float.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM0 = 4;
- private static final int DIM1 = 7;
- private static final int RANK = 2;
-
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
double[][] dset_data = new double[DIM0][DIM1];
// Initialize data.
@@ -50,7 +51,7 @@ public class H5Ex_T_Float {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -73,7 +74,8 @@ public class H5Ex_T_Float {
try {
if ((file_id >= 0) && (dataspace_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_IEEE_F64LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -82,8 +84,8 @@ public class H5Ex_T_Float {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -115,14 +117,14 @@ public class H5Ex_T_Float {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
double[][] dset_data;
// Open an existing file.
@@ -161,13 +163,13 @@ public class H5Ex_T_Float {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset).
- dset_data = new double[(int) dims[0]][(int) (dims[1])];
+ dset_data = new double[(int)dims[0]][(int)(dims[1])];
// Read data.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -211,10 +213,10 @@ public class H5Ex_T_Float {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_Float.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -222,5 +224,4 @@ public class H5Ex_T_Float {
// data dynamically.
H5Ex_T_Float.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_FloatAttribute.java b/java/examples/datatypes/H5Ex_T_FloatAttribute.java
index eb8e1f8..426c4dd 100644
--- a/java/examples/datatypes/H5Ex_T_FloatAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_FloatAttribute.java
@@ -28,19 +28,20 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_FloatAttribute {
- private static String FILENAME = "H5Ex_T_FloatAttribute.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_FloatAttribute.h5";
+ private static String DATASETNAME = "DS1";
private static String ATTRIBUTENAME = "A1";
- private static final int DIM0 = 4;
- private static final int DIM1 = 7;
- private static final int RANK = 2;
-
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
double[][] dset_data = new double[DIM0][DIM1];
// Initialize data.
@@ -52,7 +53,7 @@ public class H5Ex_T_FloatAttribute {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -63,7 +64,8 @@ public class H5Ex_T_FloatAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -84,8 +86,9 @@ public class H5Ex_T_FloatAttribute {
// Create the attribute and write the array data to it.
try {
if ((dataset_id >= 0) && (dataspace_id >= 0))
- attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_IEEE_F64LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ attribute_id =
+ H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_IEEE_F64LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -134,15 +137,15 @@ public class H5Ex_T_FloatAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long[] dims = {DIM0, DIM1};
double[][] dset_data;
// Open an existing file.
@@ -165,7 +168,7 @@ public class H5Ex_T_FloatAttribute {
try {
if (dataset_id >= 0)
attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -190,7 +193,7 @@ public class H5Ex_T_FloatAttribute {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset).
- dset_data = new double[(int) dims[0]][(int) (dims[1])];
+ dset_data = new double[(int)dims[0]][(int)(dims[1])];
// Read data.
try {
@@ -247,10 +250,10 @@ public class H5Ex_T_FloatAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_FloatAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -258,5 +261,4 @@ public class H5Ex_T_FloatAttribute {
// data dynamically.
H5Ex_T_FloatAttribute.ReadDataset();
}
-
}
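The attribute examples all follow the same read path: open the dataset, open the attribute by name relative to it, recover the dimensions from the attribute's dataspace, and size the Java buffer with (int) casts. A condensed sketch of that path against the H5Ex_T_FloatAttribute.h5 file the example above produces (class name hypothetical; the examples' per-call try/catch and finally-block cleanup are omitted here for brevity):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class AttrSketch {
        public static void main(String[] args) throws Exception
        {
            long file = H5.H5Fopen("H5Ex_T_FloatAttribute.h5", HDF5Constants.H5F_ACC_RDONLY,
                                   HDF5Constants.H5P_DEFAULT);
            long dset = H5.H5Dopen(file, "DS1", HDF5Constants.H5P_DEFAULT);
            // Attributes are addressed relative to an object; "." means the dataset itself.
            long attr = H5.H5Aopen_by_name(dset, ".", "A1", HDF5Constants.H5P_DEFAULT,
                                           HDF5Constants.H5P_DEFAULT);
            // Size the read buffer from the attribute's own dataspace.
            long space  = H5.H5Aget_space(attr);
            long[] dims = {0, 0};
            H5.H5Sget_simple_extent_dims(space, dims, null);
            double[][] data = new double[(int)dims[0]][(int)dims[1]];
            H5.H5Aread(attr, HDF5Constants.H5T_NATIVE_DOUBLE, data);
            System.out.println("A1[0][0] = " + data[0][0]);
            H5.H5Sclose(space);
            H5.H5Aclose(attr);
            H5.H5Dclose(dset);
            H5.H5Fclose(file);
        }
    }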
diff --git a/java/examples/datatypes/H5Ex_T_Integer.java b/java/examples/datatypes/H5Ex_T_Integer.java
index bb8e0cb..919ea7e 100644
--- a/java/examples/datatypes/H5Ex_T_Integer.java
+++ b/java/examples/datatypes/H5Ex_T_Integer.java
@@ -26,17 +26,18 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_Integer {
- private static String FILENAME = "H5Ex_T_Integer.h5";
+ private static String FILENAME = "H5Ex_T_Integer.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM0 = 4;
- private static final int DIM1 = 7;
- private static final int RANK = 2;
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
int[][] dset_data = new int[DIM0][DIM1];
// Initialize data.
@@ -48,7 +49,7 @@ public class H5Ex_T_Integer {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -70,7 +71,8 @@ public class H5Ex_T_Integer {
try {
if ((file_id >= 0) && (dataspace_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I64BE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -79,8 +81,8 @@ public class H5Ex_T_Integer {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -112,14 +114,14 @@ public class H5Ex_T_Integer {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
int[][] dset_data;
// Open an existing file.
@@ -158,13 +160,13 @@ public class H5Ex_T_Integer {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset).
- dset_data = new int[(int) dims[0]][(int) (dims[1])];
+ dset_data = new int[(int)dims[0]][(int)(dims[1])];
// Read data.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -208,10 +210,10 @@ public class H5Ex_T_Integer {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_Integer.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -219,5 +221,4 @@ public class H5Ex_T_Integer {
// data dynamically.
H5Ex_T_Integer.ReadDataset();
}
-
}
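The Integer and Float examples share one skeleton: create file, dataspace, and dataset, write with a native memory type against a standard file type, then reopen and size the read buffer from H5Sget_simple_extent_dims. A condensed sketch of that round trip using only calls visible in the hunks above (file and class names hypothetical; the examples' per-call try/catch blocks are omitted for brevity):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class RoundTripSketch {
        public static void main(String[] args) throws Exception
        {
            long[] dims = {4, 7};
            int[][] out = new int[4][7];
            for (int i = 0; i < 4; i++)
                for (int j = 0; j < 7; j++)
                    out[i][j] = i * j - j;

            // Write: file -> dataspace -> dataset -> H5Dwrite, then close in reverse order.
            long file  = H5.H5Fcreate("sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                                      HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            long space = H5.H5Screate_simple(2, dims, null);
            long dset  = H5.H5Dcreate(file, "DS1", HDF5Constants.H5T_STD_I64BE, space,
                                      HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
                                      HDF5Constants.H5P_DEFAULT);
            H5.H5Dwrite(dset, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                        HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, out);
            H5.H5Dclose(dset);
            H5.H5Sclose(space);
            H5.H5Fclose(file);

            // Read: recover the dimensions from the file, then size the buffer with (int) casts.
            file  = H5.H5Fopen("sketch.h5", HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
            dset  = H5.H5Dopen(file, "DS1", HDF5Constants.H5P_DEFAULT);
            space = H5.H5Dget_space(dset);
            H5.H5Sget_simple_extent_dims(space, dims, null);
            int[][] in = new int[(int)dims[0]][(int)dims[1]];
            H5.H5Dread(dset, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                       HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, in);
            H5.H5Sclose(space);
            H5.H5Dclose(dset);
            H5.H5Fclose(file);
        }
    }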
diff --git a/java/examples/datatypes/H5Ex_T_IntegerAttribute.java b/java/examples/datatypes/H5Ex_T_IntegerAttribute.java
index b0df5e4..4ec98c4 100644
--- a/java/examples/datatypes/H5Ex_T_IntegerAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_IntegerAttribute.java
@@ -26,19 +26,20 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_IntegerAttribute {
- private static String FILENAME = "H5Ex_T_IntegerAttribute.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_IntegerAttribute.h5";
+ private static String DATASETNAME = "DS1";
private static String ATTRIBUTENAME = "A1";
- private static final int DIM0 = 4;
- private static final int DIM1 = 7;
- private static final int RANK = 2;
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long[] dims = {DIM0, DIM1};
int[][] dset_data = new int[DIM0][DIM1];
// Initialize data.
@@ -50,7 +51,7 @@ public class H5Ex_T_IntegerAttribute {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -61,7 +62,8 @@ public class H5Ex_T_IntegerAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -82,8 +84,9 @@ public class H5Ex_T_IntegerAttribute {
// Create the attribute and write the array data to it.
try {
if ((dataset_id >= 0) && (dataspace_id >= 0))
- attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_I64BE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ attribute_id =
+ H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_I64BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -132,15 +135,15 @@ public class H5Ex_T_IntegerAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long[] dims = {DIM0, DIM1};
int[][] dset_data;
// Open an existing file.
@@ -163,7 +166,7 @@ public class H5Ex_T_IntegerAttribute {
try {
if (dataset_id >= 0)
attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -188,7 +191,7 @@ public class H5Ex_T_IntegerAttribute {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset).
- dset_data = new int[(int) dims[0]][(int) (dims[1])];
+ dset_data = new int[(int)dims[0]][(int)(dims[1])];
// Read data.
try {
@@ -245,10 +248,10 @@ public class H5Ex_T_IntegerAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_IntegerAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -256,5 +259,4 @@ public class H5Ex_T_IntegerAttribute {
// data dynamically.
H5Ex_T_IntegerAttribute.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_ObjectReference.java b/java/examples/datatypes/H5Ex_T_ObjectReference.java
index b0f98de..9220d8f 100644
--- a/java/examples/datatypes/H5Ex_T_ObjectReference.java
+++ b/java/examples/datatypes/H5Ex_T_ObjectReference.java
@@ -28,54 +28,50 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_ObjectReference {
- private static String FILENAME = "H5Ex_T_ObjectReference.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_ObjectReference.h5";
+ private static String DATASETNAME = "DS1";
private static String DATASETNAME2 = "DS2";
- private static String GROUPNAME = "G1";
- private static final int DIM0 = 2;
- private static final int RANK = 1;
+ private static String GROUPNAME = "G1";
+ private static final int DIM0 = 2;
+ private static final int RANK = 1;
// Values for the status of space allocation
enum H5G_obj {
- H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */
- H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */
- H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */
+ H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */
+ H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */
+ H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */
H5G_TYPE(HDF5Constants.H5O_TYPE_NAMED_DATATYPE); /* Object is a named data type */
private static final Map<Integer, H5G_obj> lookup = new HashMap<Integer, H5G_obj>();
- static {
+ static
+ {
for (H5G_obj s : EnumSet.allOf(H5G_obj.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5G_obj(int layout_type) {
- this.code = layout_type;
- }
+ H5G_obj(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5G_obj get(int code) {
- return lookup.get(code);
- }
+ public static H5G_obj get(int code) { return lookup.get(code); }
}
- private static void writeObjRef() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long group_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ private static void writeObjRef()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -86,7 +82,8 @@ public class H5Ex_T_ObjectReference {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if ((file_id >= 0) && (dataspace_id >= 0)) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
if (dataset_id >= 0)
H5.H5Dclose(dataset_id);
dataset_id = HDF5Constants.H5I_INVALID_HID;
@@ -101,8 +98,8 @@ public class H5Ex_T_ObjectReference {
// Create a group in the file.
try {
if (file_id >= 0)
- group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
if (group_id >= 0)
H5.H5Gclose(group_id);
group_id = HDF5Constants.H5I_INVALID_HID;
@@ -119,7 +116,7 @@ public class H5Ex_T_ObjectReference {
catch (Throwable err) {
err.printStackTrace();
}
-
+
try {
dset_data[1] = H5.H5Rcreate_object(file_id, DATASETNAME2, HDF5Constants.H5P_DEFAULT);
}
@@ -127,7 +124,7 @@ public class H5Ex_T_ObjectReference {
err.printStackTrace();
}
}
-
+
// Create dataspace. Setting maximum size to NULL sets the maximum
// size to be the current size.
try {
@@ -136,22 +133,23 @@ public class H5Ex_T_ObjectReference {
catch (Exception e) {
e.printStackTrace();
}
-
+
// Create the dataset.
try {
if ((file_id >= 0) && (filespace_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_REF, filespace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
-
+
// Write the object references to it.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_STD_REF, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_STD_REF, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -161,8 +159,16 @@ public class H5Ex_T_ObjectReference {
ex.printStackTrace();
}
finally {
- try {H5.H5Rdestroy(dset_data[1]);} catch (Exception ex) {}
- try {H5.H5Rdestroy(dset_data[0]);} catch (Exception ex) {}
+ try {
+ H5.H5Rdestroy(dset_data[1]);
+ }
+ catch (Exception ex) {
+ }
+ try {
+ H5.H5Rdestroy(dset_data[0]);
+ }
+ catch (Exception ex) {
+ }
}
// End access to the dataset and release resources used by it.
@@ -193,13 +199,14 @@ public class H5Ex_T_ObjectReference {
}
}
- private static void readObjRef() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- int object_type = -1;
- long object_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ private static void readObjRef()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ int object_type = -1;
+ long object_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
// Open an existing file.
@@ -209,23 +216,24 @@ public class H5Ex_T_ObjectReference {
// Open an existing dataset.
try {
dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-
+
try {
// Get dataspace and allocate memory for read buffer.
dataspace_id = H5.H5Dget_space(dataset_id);
H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-
+
// Read data.
- H5.H5Dread(dataset_id, HDF5Constants.H5T_STD_REF, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
-
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_STD_REF, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+
// Output the data to the screen.
for (int indx = 0; indx < dims[0]; indx++) {
System.out.println(DATASETNAME + "[" + indx + "]:");
System.out.print(" ->");
// Open the referenced object, get its name and type.
try {
- object_id = H5.H5Ropen_object(dset_data[indx], HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ object_id = H5.H5Ropen_object(dset_data[indx], HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
try {
object_type = H5.H5Rget_obj_type3(dset_data[indx], HDF5Constants.H5R_OBJECT);
String obj_name = null;
@@ -255,14 +263,22 @@ public class H5Ex_T_ObjectReference {
e.printStackTrace();
}
finally {
- try {H5.H5Oclose(object_id);} catch (Exception e) {}
+ try {
+ H5.H5Oclose(object_id);
+ }
+ catch (Exception e) {
+ }
}
}
catch (Exception e4) {
e4.printStackTrace();
}
finally {
- try {H5.H5Rdestroy(dset_data[indx]);} catch (Exception e4) {}
+ try {
+ H5.H5Rdestroy(dset_data[indx]);
+ }
+ catch (Exception e4) {
+ }
}
} // end for
}
@@ -270,25 +286,38 @@ public class H5Ex_T_ObjectReference {
e3.printStackTrace();
}
finally {
- try {H5.H5Sclose(dataspace_id);} catch (Exception e3) {}
+ try {
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e3) {
+ }
}
}
catch (Exception e2) {
e2.printStackTrace();
}
finally {
- try {H5.H5Dclose(dataset_id);} catch (Exception e2) {}
+ try {
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e2) {
+ }
}
}
catch (Exception e1) {
e1.printStackTrace();
}
finally {
- try {H5.H5Fclose(file_id);} catch (Exception e1) {}
+ try {
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e1) {
+ }
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
// Check if gzip compression is available and can be used for both
// compression and decompression. Normally we do not perform error
// checking in these examples for the sake of clarity, but in this
@@ -297,5 +326,4 @@ public class H5Ex_T_ObjectReference {
H5Ex_T_ObjectReference.writeObjRef();
H5Ex_T_ObjectReference.readObjRef();
}
-
}
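The expanded try/finally blocks above all protect one invariant: every byte[] reference returned by H5Rcreate_object must eventually be released with H5Rdestroy, and every id returned by H5Ropen_object must be closed with H5Oclose, even on error paths. A minimal sketch of that lifecycle against the file the example writes (class name hypothetical):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class RefSketch {
        public static void main(String[] args) throws Exception
        {
            long file = H5.H5Fopen("H5Ex_T_ObjectReference.h5", HDF5Constants.H5F_ACC_RDONLY,
                                   HDF5Constants.H5P_DEFAULT);
            // H5Rcreate_object returns an opaque byte[] token that owns library resources.
            byte[] ref = H5.H5Rcreate_object(file, "DS2", HDF5Constants.H5P_DEFAULT);
            try {
                long obj = H5.H5Ropen_object(ref, HDF5Constants.H5P_DEFAULT,
                                             HDF5Constants.H5P_DEFAULT);
                try {
                    int type = H5.H5Rget_obj_type3(ref, HDF5Constants.H5R_OBJECT);
                    System.out.println("referenced object type: " + type);
                }
                finally {
                    H5.H5Oclose(obj); // always release the opened object id
                }
            }
            finally {
                H5.H5Rdestroy(ref); // always release the reference token itself
                H5.H5Fclose(file);
            }
        }
    }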
diff --git a/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java b/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java
index f61ae0d..be84e51 100644
--- a/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java
@@ -28,55 +28,51 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_ObjectReferenceAttribute {
- private static String FILENAME = "H5Ex_T_ObjectReferenceAttribute.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_ObjectReferenceAttribute.h5";
+ private static String DATASETNAME = "DS1";
private static String ATTRIBUTENAME = "A1";
- private static String DATASETNAME2 = "DS2";
- private static String GROUPNAME = "G1";
- private static final int DIM0 = 2;
- private static final int RANK = 1;
+ private static String DATASETNAME2 = "DS2";
+ private static String GROUPNAME = "G1";
+ private static final int DIM0 = 2;
+ private static final int RANK = 1;
// Values for the status of space allocation
enum H5G_obj {
- H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */
- H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */
- H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */
+ H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */
+ H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */
+ H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */
H5G_TYPE(HDF5Constants.H5O_TYPE_NAMED_DATATYPE); /* Object is a named data type */
private static final Map<Integer, H5G_obj> lookup = new HashMap<Integer, H5G_obj>();
- static {
+ static
+ {
for (H5G_obj s : EnumSet.allOf(H5G_obj.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5G_obj(int layout_type) {
- this.code = layout_type;
- }
+ H5G_obj(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5G_obj get(int code) {
- return lookup.get(code);
- }
+ public static H5G_obj get(int code) { return lookup.get(code); }
}
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long group_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -87,7 +83,8 @@ public class H5Ex_T_ObjectReferenceAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if ((file_id >= 0) && (dataspace_id >= 0)) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
if (dataset_id >= 0)
H5.H5Dclose(dataset_id);
dataset_id = HDF5Constants.H5I_INVALID_HID;
@@ -102,8 +99,8 @@ public class H5Ex_T_ObjectReferenceAttribute {
// Create a group in the file.
try {
if (file_id >= 0)
- group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
if (group_id >= 0)
H5.H5Gclose(group_id);
group_id = HDF5Constants.H5I_INVALID_HID;
@@ -120,7 +117,7 @@ public class H5Ex_T_ObjectReferenceAttribute {
catch (Throwable err) {
err.printStackTrace();
}
-
+
try {
dset_data[1] = H5.H5Rcreate_object(file_id, DATASETNAME2, HDF5Constants.H5P_DEFAULT);
}
@@ -135,7 +132,8 @@ public class H5Ex_T_ObjectReferenceAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -143,7 +141,7 @@ public class H5Ex_T_ObjectReferenceAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
+
// Create dataspace. Setting maximum size to NULL sets the maximum
// size to be the current size.
try {
@@ -152,17 +150,18 @@ public class H5Ex_T_ObjectReferenceAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
+
// Create the attribute and write the array data to it.
try {
if ((dataset_id >= 0) && (dataspace_id >= 0))
- attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_REF, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ attribute_id =
+ H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_REF, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
-
+
// Write the dataset.
try {
if (attribute_id >= 0)
@@ -176,8 +175,16 @@ public class H5Ex_T_ObjectReferenceAttribute {
ex.printStackTrace();
}
finally {
- try {H5.H5Rdestroy(dset_data[1]);} catch (Exception ex) {}
- try {H5.H5Rdestroy(dset_data[0]);} catch (Exception ex) {}
+ try {
+ H5.H5Rdestroy(dset_data[1]);
+ }
+ catch (Exception ex) {
+ }
+ try {
+ H5.H5Rdestroy(dset_data[0]);
+ }
+ catch (Exception ex) {
+ }
}
// End access to the dataset and release resources used by it.
@@ -215,14 +222,15 @@ public class H5Ex_T_ObjectReferenceAttribute {
}
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long attribute_id = HDF5Constants.H5I_INVALID_HID;
- int object_type = -1;
- long object_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ int object_type = -1;
+ long object_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
// Open an existing file.
@@ -232,28 +240,30 @@ public class H5Ex_T_ObjectReferenceAttribute {
// Open an existing dataset.
try {
dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-
+
try {
- attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
-
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
// Get dataspace and allocate memory for read buffer.
try {
dataspace_id = H5.H5Aget_space(attribute_id);
H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-
+
// Read data.
H5.H5Aread(attribute_id, HDF5Constants.H5T_STD_REF, dset_data);
-
+
// Output the data to the screen.
for (int indx = 0; indx < dims[0]; indx++) {
System.out.println(ATTRIBUTENAME + "[" + indx + "]:");
System.out.print(" ->");
// Open the referenced object, get its name and type.
try {
- object_id = H5.H5Ropen_object(dset_data[indx], HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ object_id = H5.H5Ropen_object(dset_data[indx], HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
try {
- object_type = H5.H5Rget_obj_type3(dset_data[indx], HDF5Constants.H5R_OBJECT);
+ object_type =
+ H5.H5Rget_obj_type3(dset_data[indx], HDF5Constants.H5R_OBJECT);
String obj_name = null;
if (object_type >= 0) {
// Get the name.
@@ -281,14 +291,22 @@ public class H5Ex_T_ObjectReferenceAttribute {
e.printStackTrace();
}
finally {
- try {H5.H5Oclose(object_id);} catch (Exception e) {}
+ try {
+ H5.H5Oclose(object_id);
+ }
+ catch (Exception e) {
+ }
}
}
catch (Exception e5) {
e5.printStackTrace();
}
finally {
- try {H5.H5Rdestroy(dset_data[indx]);} catch (Exception e5) {}
+ try {
+ H5.H5Rdestroy(dset_data[indx]);
+ }
+ catch (Exception e5) {
+ }
}
} // end for
}
@@ -296,32 +314,49 @@ public class H5Ex_T_ObjectReferenceAttribute {
e4.printStackTrace();
}
finally {
- try {H5.H5Sclose(dataspace_id);} catch (Exception e3) {}
+ try {
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e3) {
+ }
}
}
catch (Exception e3) {
e3.printStackTrace();
}
finally {
- try {H5.H5Aclose(attribute_id);} catch (Exception e4) {}
+ try {
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e4) {
+ }
}
}
catch (Exception e2) {
e2.printStackTrace();
}
finally {
- try {H5.H5Dclose(dataset_id);} catch (Exception e2) {}
+ try {
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e2) {
+ }
}
}
catch (Exception e1) {
e1.printStackTrace();
}
finally {
- try {H5.H5Fclose(file_id);} catch (Exception e1) {}
+ try {
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e1) {
+ }
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_ObjectReferenceAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -329,5 +364,4 @@ public class H5Ex_T_ObjectReferenceAttribute {
// data dynamically.
H5Ex_T_ObjectReferenceAttribute.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_Opaque.java b/java/examples/datatypes/H5Ex_T_Opaque.java
index e851fb2..419a5c8 100644
--- a/java/examples/datatypes/H5Ex_T_Opaque.java
+++ b/java/examples/datatypes/H5Ex_T_Opaque.java
@@ -24,32 +24,33 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_Opaque {
- private static String FILENAME = "H5Ex_T_Opaque.h5";
+ private static String FILENAME = "H5Ex_T_Opaque.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM0 = 4;
- private static final int LEN = 7;
- private static final int RANK = 1;
+ private static final int DIM0 = 4;
+ private static final int LEN = 7;
+ private static final int RANK = 1;
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long datatype_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- byte[] dset_data = new byte[DIM0 * LEN];
- byte[] str_data = { 'O', 'P', 'A', 'Q', 'U', 'E' };
+ long datatype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[] dset_data = new byte[DIM0 * LEN];
+ byte[] str_data = {'O', 'P', 'A', 'Q', 'U', 'E'};
// Initialize data.
for (int indx = 0; indx < DIM0; indx++) {
for (int jndx = 0; jndx < LEN - 1; jndx++)
dset_data[jndx + indx * LEN] = str_data[jndx];
- dset_data[LEN - 1 + indx * LEN] = (byte) (indx + '0');
+ dset_data[LEN - 1 + indx * LEN] = (byte)(indx + '0');
}
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -82,8 +83,9 @@ public class H5Ex_T_Opaque {
// automatically converts between different integer types.
try {
if ((file_id >= 0) && (datatype_id >= 0) && (dataspace_id >= 0))
- dataset_id = H5.H5Dcreate(file_id, DATASETNAME, datatype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, datatype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -93,7 +95,7 @@ public class H5Ex_T_Opaque {
try {
if ((dataset_id >= 0) && (datatype_id >= 0))
H5.H5Dwrite(dataset_id, datatype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -133,16 +135,16 @@ public class H5Ex_T_Opaque {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long datatype_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long datatype_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long type_len = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long type_len = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
byte[] dset_data;
String tag_name = null;
@@ -194,13 +196,13 @@ public class H5Ex_T_Opaque {
}
// Allocate buffer.
- dset_data = new byte[(int) (dims[0] * type_len)];
+ dset_data = new byte[(int)(dims[0] * type_len)];
// Read data.
try {
if ((dataset_id >= 0) && (datatype_id >= 0))
H5.H5Dread(dataset_id, datatype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -211,7 +213,7 @@ public class H5Ex_T_Opaque {
for (int indx = 0; indx < dims[0]; indx++) {
System.out.print(DATASETNAME + "[" + indx + "]: ");
for (int jndx = 0; jndx < type_len; jndx++) {
- char temp = (char) dset_data[jndx + indx * (int)type_len];
+ char temp = (char)dset_data[jndx + indx * (int)type_len];
System.out.print(temp);
}
System.out.println("");
@@ -252,10 +254,10 @@ public class H5Ex_T_Opaque {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_Opaque.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -263,5 +265,4 @@ public class H5Ex_T_Opaque {
// data dynamically.
H5Ex_T_Opaque.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java b/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java
index 3e16ab4..b8a15a6 100644
--- a/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java
@@ -24,34 +24,35 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_OpaqueAttribute {
- private static String FILENAME = "H5Ex_T_OpaqueAttribute.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_OpaqueAttribute.h5";
+ private static String DATASETNAME = "DS1";
private static String ATTRIBUTENAME = "A1";
- private static final int DIM0 = 4;
- private static final int LEN = 7;
- private static final int RANK = 1;
+ private static final int DIM0 = 4;
+ private static final int LEN = 7;
+ private static final int RANK = 1;
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long datatype_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long datatype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- byte[] dset_data = new byte[DIM0 * LEN];
- byte[] str_data = { 'O', 'P', 'A', 'Q', 'U', 'E' };
+ long[] dims = {DIM0};
+ byte[] dset_data = new byte[DIM0 * LEN];
+ byte[] str_data = {'O', 'P', 'A', 'Q', 'U', 'E'};
// Initialize data.
for (int indx = 0; indx < DIM0; indx++) {
for (int jndx = 0; jndx < LEN - 1; jndx++)
dset_data[jndx + indx * LEN] = str_data[jndx];
- dset_data[LEN - 1 + indx * LEN] = (byte) (indx + '0');
+ dset_data[LEN - 1 + indx * LEN] = (byte)(indx + '0');
}
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -62,7 +63,8 @@ public class H5Ex_T_OpaqueAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -96,7 +98,7 @@ public class H5Ex_T_OpaqueAttribute {
try {
if ((dataset_id >= 0) && (datatype_id >= 0) && (dataspace_id >= 0))
attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, datatype_id, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -153,17 +155,17 @@ public class H5Ex_T_OpaqueAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long datatype_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long datatype_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long type_len = -1;
- long[] dims = { DIM0 };
+ long type_len = -1;
+ long[] dims = {DIM0};
byte[] dset_data;
String tag_name = null;
@@ -187,7 +189,7 @@ public class H5Ex_T_OpaqueAttribute {
try {
if (dataset_id >= 0)
attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -224,7 +226,7 @@ public class H5Ex_T_OpaqueAttribute {
}
// Allocate buffer.
- dset_data = new byte[(int) (dims[0] * type_len)];
+ dset_data = new byte[(int)(dims[0] * type_len)];
// Read data.
try {
@@ -240,7 +242,7 @@ public class H5Ex_T_OpaqueAttribute {
for (int indx = 0; indx < dims[0]; indx++) {
System.out.print(ATTRIBUTENAME + "[" + indx + "]: ");
for (int jndx = 0; jndx < type_len; jndx++) {
- char temp = (char) dset_data[jndx + indx * (int)type_len];
+ char temp = (char)dset_data[jndx + indx * (int)type_len];
System.out.print(temp);
}
System.out.println("");
@@ -289,10 +291,10 @@ public class H5Ex_T_OpaqueAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_OpaqueAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -300,5 +302,4 @@ public class H5Ex_T_OpaqueAttribute {
// data dynamically.
H5Ex_T_OpaqueAttribute.ReadDataset();
}
-
}
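The opaque examples treat each element as an uninterpreted run of LEN bytes identified by a text tag, which the read path above recovers as type_len and tag_name. A minimal sketch of building such a type; the H5Tcreate, H5Tset_tag, H5Tget_size, and H5Tget_tag calls come from the parts of the example outside these hunks, so treat the exact calls as an assumption:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class OpaqueSketch {
        public static void main(String[] args) throws Exception
        {
            final int LEN = 7;
            // An opaque type is just "LEN bytes per element" plus a descriptive tag.
            long dtype = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, LEN);
            H5.H5Tset_tag(dtype, "Character array");
            System.out.println("size=" + H5.H5Tget_size(dtype) + " tag=" + H5.H5Tget_tag(dtype));
            H5.H5Tclose(dtype);
        }
    }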
diff --git a/java/examples/datatypes/H5Ex_T_String.java b/java/examples/datatypes/H5Ex_T_String.java
index e497bd8..a69a70b 100644
--- a/java/examples/datatypes/H5Ex_T_String.java
+++ b/java/examples/datatypes/H5Ex_T_String.java
@@ -24,27 +24,28 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_String {
- private static String FILENAME = "H5Ex_T_String.h5";
+ private static String FILENAME = "H5Ex_T_String.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM0 = 4;
- private static final int SDIM = 8;
- private static final int RANK = 1;
-
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- byte[][] dset_data = new byte[DIM0][SDIM];
- StringBuffer[] str_data = { new StringBuffer("Parting"), new StringBuffer("is such"),
- new StringBuffer("sweet"), new StringBuffer("sorrow.") };
+ private static final int DIM0 = 4;
+ private static final int SDIM = 8;
+ private static final int RANK = 1;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data = new byte[DIM0][SDIM];
+ StringBuffer[] str_data = {new StringBuffer("Parting"), new StringBuffer("is such"),
+ new StringBuffer("sweet"), new StringBuffer("sorrow.")};
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -82,8 +83,9 @@ public class H5Ex_T_String {
// Create the dataset and write the string data to it.
try {
if ((file_id >= 0) && (filetype_id >= 0) && (dataspace_id >= 0))
- dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -94,14 +96,14 @@ public class H5Ex_T_String {
for (int indx = 0; indx < DIM0; indx++) {
for (int jndx = 0; jndx < SDIM; jndx++) {
if (jndx < str_data[indx].length())
- dset_data[indx][jndx] = (byte) str_data[indx].charAt(jndx);
+ dset_data[indx][jndx] = (byte)str_data[indx].charAt(jndx);
else
dset_data[indx][jndx] = 0;
}
}
if ((dataset_id >= 0) && (memtype_id >= 0))
H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -151,17 +153,17 @@ public class H5Ex_T_String {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long sdim = 0;
- long[] dims = { DIM0 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long sdim = 0;
+ long[] dims = {DIM0};
byte[][] dset_data;
StringBuffer[] str_data;
@@ -213,8 +215,8 @@ public class H5Ex_T_String {
}
// Allocate space for data.
- dset_data = new byte[(int) dims[0]][(int)sdim];
- str_data = new StringBuffer[(int) dims[0]];
+ dset_data = new byte[(int)dims[0]][(int)sdim];
+ str_data = new StringBuffer[(int)dims[0]];
// Create the memory datatype.
try {
@@ -230,9 +232,9 @@ public class H5Ex_T_String {
try {
if ((dataset_id >= 0) && (memtype_id >= 0))
H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
byte[] tempbuf = new byte[(int)sdim];
- for (int indx = 0; indx < (int) dims[0]; indx++) {
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
for (int jndx = 0; jndx < sdim; jndx++) {
tempbuf[jndx] = dset_data[indx][jndx];
}
@@ -293,10 +295,10 @@ public class H5Ex_T_String {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_String.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -304,5 +306,4 @@ public class H5Ex_T_String {
// data dynamically.
H5Ex_T_String.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_StringAttribute.java b/java/examples/datatypes/H5Ex_T_StringAttribute.java
index 700f6a9..46c1038 100644
--- a/java/examples/datatypes/H5Ex_T_StringAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_StringAttribute.java
@@ -24,29 +24,30 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_StringAttribute {
- private static String FILENAME = "H5Ex_T_StringAttribute.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_StringAttribute.h5";
+ private static String DATASETNAME = "DS1";
private static String ATTRIBUTENAME = "A1";
- private static final int DIM0 = 4;
- private static final int SDIM = 8;
- private static final int RANK = 1;
-
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- byte[][] dset_data = new byte[DIM0][SDIM];
- StringBuffer[] str_data = { new StringBuffer("Parting"), new StringBuffer("is such"),
- new StringBuffer("sweet"), new StringBuffer("sorrow.") };
+ private static final int DIM0 = 4;
+ private static final int SDIM = 8;
+ private static final int RANK = 1;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data = new byte[DIM0][SDIM];
+ StringBuffer[] str_data = {new StringBuffer("Parting"), new StringBuffer("is such"),
+ new StringBuffer("sweet"), new StringBuffer("sorrow.")};
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -77,7 +78,8 @@ public class H5Ex_T_StringAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -99,7 +101,7 @@ public class H5Ex_T_StringAttribute {
try {
if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -110,7 +112,7 @@ public class H5Ex_T_StringAttribute {
for (int indx = 0; indx < DIM0; indx++) {
for (int jndx = 0; jndx < SDIM; jndx++) {
if (jndx < str_data[indx].length())
- dset_data[indx][jndx] = (byte) str_data[indx].charAt(jndx);
+ dset_data[indx][jndx] = (byte)str_data[indx].charAt(jndx);
else
dset_data[indx][jndx] = 0;
}
@@ -174,18 +176,18 @@ public class H5Ex_T_StringAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long sdim = 0;
- long[] dims = { DIM0 };
+ long sdim = 0;
+ long[] dims = {DIM0};
byte[][] dset_data;
StringBuffer[] str_data;
@@ -209,7 +211,7 @@ public class H5Ex_T_StringAttribute {
try {
if (dataset_id >= 0)
attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -246,8 +248,8 @@ public class H5Ex_T_StringAttribute {
}
// Allocate space for data.
- dset_data = new byte[(int) dims[0]][(int)sdim];
- str_data = new StringBuffer[(int) dims[0]];
+ dset_data = new byte[(int)dims[0]][(int)sdim];
+ str_data = new StringBuffer[(int)dims[0]];
// Create the memory datatype.
try {
@@ -264,7 +266,7 @@ public class H5Ex_T_StringAttribute {
if ((attribute_id >= 0) && (memtype_id >= 0))
H5.H5Aread(attribute_id, memtype_id, dset_data);
byte[] tempbuf = new byte[(int)sdim];
- for (int indx = 0; indx < (int) dims[0]; indx++) {
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
for (int jndx = 0; jndx < sdim; jndx++) {
tempbuf[jndx] = dset_data[indx][jndx];
}
@@ -333,10 +335,10 @@ public class H5Ex_T_StringAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_StringAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -344,5 +346,4 @@ public class H5Ex_T_StringAttribute {
// data dynamically.
H5Ex_T_StringAttribute.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_VLString.java b/java/examples/datatypes/H5Ex_T_VLString.java
index 8a29e60..1d4c2c4 100644
--- a/java/examples/datatypes/H5Ex_T_VLString.java
+++ b/java/examples/datatypes/H5Ex_T_VLString.java
@@ -19,24 +19,24 @@ package examples.datatypes;
import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
-public class H5Ex_T_VLString
-{
- private static String FILENAME = "H5Ex_T_VLString.h5";
+public class H5Ex_T_VLString {
+ private static String FILENAME = "H5Ex_T_VLString.h5";
private static String DATASETNAME = "DS1";
- private static void createDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long type_id = HDF5Constants.H5I_INVALID_HID;
+ private static void createDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long type_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- int rank = 1;
- String[] str_data = { "Parting", "is such", "sweet", "sorrow." };
- long[] dims = { str_data.length };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ int rank = 1;
+ String[] str_data = {"Parting", "is such", "sweet", "sorrow."};
+ long[] dims = {str_data.length};
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -62,8 +62,9 @@ public class H5Ex_T_VLString
// Create the dataset and write the string data to it.
try {
if ((file_id >= 0) && (type_id >= 0) && (dataspace_id >= 0)) {
- dataset_id = H5.H5Dcreate(file_id, DATASETNAME, type_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, type_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
}
catch (Exception e) {
@@ -74,7 +75,7 @@ public class H5Ex_T_VLString
try {
if (dataset_id >= 0)
H5.H5Dwrite_VLStrings(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, str_data);
+ HDF5Constants.H5P_DEFAULT, str_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -91,11 +92,12 @@ public class H5Ex_T_VLString
}
}
- private static void readDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long type_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- String[] str_data = { "", "", "", "" };
+ private static void readDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long type_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ String[] str_data = {"", "", "", ""};
try {
file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
@@ -106,9 +108,9 @@ public class H5Ex_T_VLString
try {
dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
- type_id = H5.H5Dget_type(dataset_id);
- H5.H5Dread_VLStrings(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT,
- str_data);
+ type_id = H5.H5Dget_type(dataset_id);
+ H5.H5Dread_VLStrings(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, str_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -127,9 +129,9 @@ public class H5Ex_T_VLString
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_VLString.createDataset();
H5Ex_T_VLString.readDataset();
}
-
}
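
[Editor's note] For readers skimming the reformatted hunks above, a minimal self-contained sketch of the same variable-length string round trip. The H5Tcopy/H5Tset_size calls are not visible in this diff but are the standard way to build a vlen string type; the file name and the single throws-clause error handling are illustrative (the real example wraps each call in its own try/catch).

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class VLStringSketch {
    public static void main(String[] args) throws Exception {
        String[] out = {"Parting", "is such", "sweet", "sorrow."};
        String[] in  = new String[out.length];

        long file = H5.H5Fcreate("sketch_vlstring.h5", HDF5Constants.H5F_ACC_TRUNC,
                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        // A variable-length string type is a C string type whose size is H5T_VARIABLE.
        long type = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
        H5.H5Tset_size(type, HDF5Constants.H5T_VARIABLE);
        long space = H5.H5Screate_simple(1, new long[] {out.length}, null);
        long dset = H5.H5Dcreate(file, "DS1", type, space, HDF5Constants.H5P_DEFAULT,
                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);

        // The *_VLStrings calls marshal a Java String[] to and from HDF5 vlen strings.
        H5.H5Dwrite_VLStrings(dset, type, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
                              HDF5Constants.H5P_DEFAULT, out);
        H5.H5Dread_VLStrings(dset, type, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
                             HDF5Constants.H5P_DEFAULT, in);
        for (String s : in)
            System.out.println(s);

        H5.H5Dclose(dset);
        H5.H5Sclose(space);
        H5.H5Tclose(type);
        H5.H5Fclose(file);
    }
}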
diff --git a/java/examples/groups/CMakeLists.txt b/java/examples/groups/CMakeLists.txt
index 6483f91..9f20abf 100644
--- a/java/examples/groups/CMakeLists.txt
+++ b/java/examples/groups/CMakeLists.txt
@@ -36,6 +36,13 @@ foreach (example ${HDF_JAVA_EXAMPLES})
# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
+
+ #-----------------------------------------------------------------------------
+ # Add Target to clang-format
+ #-----------------------------------------------------------------------------
+ if (HDF5_ENABLE_FORMATTERS)
+ clang_format (HDF5_JAVA_${example}_SRC_FORMAT ${example}.java)
+ endif ()
endforeach ()
set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
diff --git a/java/examples/groups/H5Ex_G_Compact.java b/java/examples/groups/H5Ex_G_Compact.java
index 313c9c7..2c6535a 100644
--- a/java/examples/groups/H5Ex_G_Compact.java
+++ b/java/examples/groups/H5Ex_G_Compact.java
@@ -16,14 +16,14 @@
package examples.groups;
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-import hdf.hdf5lib.structs.H5G_info_t;
-
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+
public class H5Ex_G_Compact {
private static final String FILE1 = "H5Ex_G_Compact1.h5";
@@ -38,44 +38,42 @@ public class H5Ex_G_Compact {
private static final Map<Integer, H5G_storage> lookup = new HashMap<Integer, H5G_storage>();
- static {
+ static
+ {
for (H5G_storage s : EnumSet.allOf(H5G_storage.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5G_storage(int layout_type) {
- this.code = layout_type;
- }
+ H5G_storage(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5G_storage get(int code) {
- return lookup.get(code);
- }
+ public static H5G_storage get(int code) { return lookup.get(code); }
}
- public static void CreateGroup() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ public static void CreateGroup()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long group_id = HDF5Constants.H5I_INVALID_HID;
- long fapl_id = HDF5Constants.H5I_INVALID_HID;
+ long fapl_id = HDF5Constants.H5I_INVALID_HID;
H5G_info_t ginfo;
long size;
// Create file 1. This file will use original format groups.
try {
- file_id = H5.H5Fcreate (FILE1, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ file_id = H5.H5Fcreate(FILE1, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
// Create a group in the file1.
try {
- if(file_id >= 0)
- group_id = H5.H5Gcreate(file_id, GROUP, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, GROUP, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -83,7 +81,7 @@ public class H5Ex_G_Compact {
// Obtain the group info and print the group storage type.
try {
- if(group_id >= 0) {
+ if (group_id >= 0) {
ginfo = H5.H5Gget_info(group_id);
System.out.print("Group storage type for " + FILE1 + " is: ");
switch (H5G_storage.get(ginfo.storage_type)) {
@@ -160,7 +158,8 @@ public class H5Ex_G_Compact {
try {
fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
if (fapl_id >= 0)
- H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST, HDF5Constants.H5F_LIBVER_LATEST);
+ H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST,
+ HDF5Constants.H5F_LIBVER_LATEST);
}
catch (Exception e) {
e.printStackTrace();
@@ -175,8 +174,9 @@ public class H5Ex_G_Compact {
}
// Create group in file2.
try {
- if(file_id >= 0)
- group_id = H5.H5Gcreate(file_id, GROUP, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, GROUP, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -257,7 +257,5 @@ public class H5Ex_G_Compact {
}
}
- public static void main(String[] args) {
- H5Ex_G_Compact.CreateGroup();
- }
+ public static void main(String[] args) { H5Ex_G_Compact.CreateGroup(); }
}
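
[Editor's note] A condensed sketch of the point this example makes: the same H5Gcreate call yields a different on-disk group storage type depending on the file access property list. Every call below appears in the hunks above; the file and group names are illustrative.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.structs.H5G_info_t;

public class CompactSketch {
    public static void main(String[] args) throws Exception {
        // Latest-format library bounds let the library create new-style groups.
        long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
        H5.H5Pset_libver_bounds(fapl, HDF5Constants.H5F_LIBVER_LATEST,
                                HDF5Constants.H5F_LIBVER_LATEST);
        long file = H5.H5Fcreate("sketch_compact.h5", HDF5Constants.H5F_ACC_TRUNC,
                                 HDF5Constants.H5P_DEFAULT, fapl);
        long group = H5.H5Gcreate(file, "G1", HDF5Constants.H5P_DEFAULT,
                                  HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);

        H5G_info_t ginfo = H5.H5Gget_info(group);
        // Without the fapl above, a new group reports H5G_STORAGE_TYPE_SYMBOL_TABLE.
        if (ginfo.storage_type == HDF5Constants.H5G_STORAGE_TYPE_COMPACT)
            System.out.println("Group storage type is compact");

        H5.H5Gclose(group);
        H5.H5Fclose(file);
        H5.H5Pclose(fapl);
    }
}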
diff --git a/java/examples/groups/H5Ex_G_Corder.java b/java/examples/groups/H5Ex_G_Corder.java
index 4fa5aa2..86a790d 100644
--- a/java/examples/groups/H5Ex_G_Corder.java
+++ b/java/examples/groups/H5Ex_G_Corder.java
@@ -23,11 +23,12 @@ import hdf.hdf5lib.structs.H5G_info_t;
public class H5Ex_G_Corder {
private static String FILE = "H5Ex_G_Corder.h5";
- private static void CreateGroup() throws Exception {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long group_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateGroup() throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
long subgroup_id = HDF5Constants.H5I_INVALID_HID;
- long gcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long gcpl_id = HDF5Constants.H5I_INVALID_HID;
int status;
H5G_info_t ginfo;
int i;
@@ -36,35 +37,35 @@ public class H5Ex_G_Corder {
try {
// Create a new file using default properties.
file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
// Create group creation property list and enable link creation order tracking.
gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
- status = H5.H5Pset_link_creation_order(gcpl_id, HDF5Constants.H5P_CRT_ORDER_TRACKED
- + HDF5Constants.H5P_CRT_ORDER_INDEXED);
+ status = H5.H5Pset_link_creation_order(gcpl_id, HDF5Constants.H5P_CRT_ORDER_TRACKED +
+ HDF5Constants.H5P_CRT_ORDER_INDEXED);
// Create primary group using the property list.
if (status >= 0)
group_id = H5.H5Gcreate(file_id, "index_group", HDF5Constants.H5P_DEFAULT, gcpl_id,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
try {
/*
- * Create subgroups in the primary group. These will be tracked by creation order. Note that these
- * groups do not have to have the creation order tracking property set.
+ * Create subgroups in the primary group. These will be tracked by creation order. Note that
+ * these groups do not have to have the creation order tracking property set.
*/
- subgroup_id = H5.H5Gcreate(group_id, "H", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
- status = H5.H5Gclose(subgroup_id);
- subgroup_id = H5.H5Gcreate(group_id, "D", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
- status = H5.H5Gclose(subgroup_id);
- subgroup_id = H5.H5Gcreate(group_id, "F", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
- status = H5.H5Gclose(subgroup_id);
- subgroup_id = H5.H5Gcreate(group_id, "5", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
- status = H5.H5Gclose(subgroup_id);
+ subgroup_id = H5.H5Gcreate(group_id, "H", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+ subgroup_id = H5.H5Gcreate(group_id, "D", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+ subgroup_id = H5.H5Gcreate(group_id, "F", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+ subgroup_id = H5.H5Gcreate(group_id, "5", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
// Get group info.
ginfo = H5.H5Gget_info(group_id);
@@ -73,8 +74,8 @@ public class H5Ex_G_Corder {
System.out.println("Traversing group using alphabetical indices:");
for (i = 0; i < ginfo.nlinks; i++) {
// Retrieve the name of the ith link in a group
- name = H5.H5Lget_name_by_idx(group_id, ".", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC,
- i, HDF5Constants.H5P_DEFAULT);
+ name = H5.H5Lget_name_by_idx(group_id, ".", HDF5Constants.H5_INDEX_NAME,
+ HDF5Constants.H5_ITER_INC, i, HDF5Constants.H5P_DEFAULT);
System.out.println("Index " + i + ": " + name);
}
@@ -83,10 +84,9 @@ public class H5Ex_G_Corder {
for (i = 0; i < ginfo.nlinks; i++) {
// Retrieve the name of the ith link in a group
name = H5.H5Lget_name_by_idx(group_id, ".", HDF5Constants.H5_INDEX_CRT_ORDER,
- HDF5Constants.H5_ITER_INC, i, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5_ITER_INC, i, HDF5Constants.H5P_DEFAULT);
System.out.println("Index " + i + ": " + name);
}
-
}
catch (Exception e) {
e.printStackTrace();
@@ -106,7 +106,8 @@ public class H5Ex_G_Corder {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
try {
H5Ex_G_Corder.CreateGroup();
}
@@ -114,5 +115,4 @@ public class H5Ex_G_Corder {
ex.printStackTrace();
}
}
-
}
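
[Editor's note] The essence of this example in one sketch: enable creation-order tracking on a group creation property list, then read the same set of links back in two different orders. All calls mirror the hunks above; names are illustrative.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class CorderSketch {
    public static void main(String[] args) throws Exception {
        long file = H5.H5Fcreate("sketch_corder.h5", HDF5Constants.H5F_ACC_TRUNC,
                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        // Track and index link creation order on the group creation property list.
        long gcpl = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
        H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED +
                                            HDF5Constants.H5P_CRT_ORDER_INDEXED);
        long group = H5.H5Gcreate(file, "index_group", HDF5Constants.H5P_DEFAULT, gcpl,
                                  HDF5Constants.H5P_DEFAULT);
        for (String sub : new String[] {"H", "D", "F", "5"})
            H5.H5Gclose(H5.H5Gcreate(group, sub, HDF5Constants.H5P_DEFAULT,
                                     HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT));

        // Same links, two orderings: alphabetical index vs. creation-order index.
        for (int i = 0; i < 4; i++)
            System.out.println("name[" + i + "] = " +
                H5.H5Lget_name_by_idx(group, ".", HDF5Constants.H5_INDEX_NAME,
                                      HDF5Constants.H5_ITER_INC, i, HDF5Constants.H5P_DEFAULT));
        for (int i = 0; i < 4; i++)
            System.out.println("crt[" + i + "]  = " +
                H5.H5Lget_name_by_idx(group, ".", HDF5Constants.H5_INDEX_CRT_ORDER,
                                      HDF5Constants.H5_ITER_INC, i, HDF5Constants.H5P_DEFAULT));

        H5.H5Gclose(group);
        H5.H5Pclose(gcpl);
        H5.H5Fclose(file);
    }
}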
diff --git a/java/examples/groups/H5Ex_G_Create.java b/java/examples/groups/H5Ex_G_Create.java
index 9304538..51804b9 100644
--- a/java/examples/groups/H5Ex_G_Create.java
+++ b/java/examples/groups/H5Ex_G_Create.java
@@ -20,17 +20,18 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_G_Create {
- private static String FILENAME = "H5Ex_G_Create.h5";
+ private static String FILENAME = "H5Ex_G_Create.h5";
private static String GROUPNAME = "G1";
- private static void CreateGroup() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateGroup()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long group_id = HDF5Constants.H5I_INVALID_HID;
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -39,8 +40,8 @@ public class H5Ex_G_Create {
// Create a group in the file.
try {
if (file_id >= 0)
- group_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ group_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -81,11 +82,7 @@ public class H5Ex_G_Create {
catch (Exception e) {
e.printStackTrace();
}
-
- }
-
- public static void main(String[] args) {
- H5Ex_G_Create.CreateGroup();
}
+ public static void main(String[] args) { H5Ex_G_Create.CreateGroup(); }
}
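
[Editor's note] A minimal sketch of the create/close/reopen lifecycle this example demonstrates. H5Gopen is not in the hunks above but appears elsewhere in this commit with the same signature; the file name is illustrative.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class GroupCreateSketch {
    public static void main(String[] args) throws Exception {
        long file = H5.H5Fcreate("sketch_group.h5", HDF5Constants.H5F_ACC_TRUNC,
                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        long group = H5.H5Gcreate(file, "/G1", HDF5Constants.H5P_DEFAULT,
                                  HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        H5.H5Gclose(group);

        // Once created, a group is addressed by path; reopen it by name.
        group = H5.H5Gopen(file, "/G1", HDF5Constants.H5P_DEFAULT);
        H5.H5Gclose(group);
        H5.H5Fclose(file);
    }
}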
diff --git a/java/examples/groups/H5Ex_G_Intermediate.java b/java/examples/groups/H5Ex_G_Intermediate.java
index ad0290c..e638fd0 100644
--- a/java/examples/groups/H5Ex_G_Intermediate.java
+++ b/java/examples/groups/H5Ex_G_Intermediate.java
@@ -16,47 +16,49 @@
************************************************************/
package examples.groups;
+import java.util.ArrayList;
+
import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
-import hdf.hdf5lib.callbacks.H5O_iterate_t;
import hdf.hdf5lib.callbacks.H5O_iterate_opdata_t;
+import hdf.hdf5lib.callbacks.H5O_iterate_t;
import hdf.hdf5lib.structs.H5O_info_t;
-import java.util.ArrayList;
-
public class H5Ex_G_Intermediate {
private static String FILE = "H5Ex_G_Intermediate.h5";
- private void CreateGroup() throws Exception {
+ private void CreateGroup() throws Exception
+ {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long group_id = HDF5Constants.H5I_INVALID_HID;
- long gcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long gcpl_id = HDF5Constants.H5I_INVALID_HID;
try {
// Create a new file_id using the default properties.
file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
// Create group_id creation property list and set it to allow creation of intermediate group_ids.
gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
H5.H5Pset_create_intermediate_group(gcpl_id, true);
/*
- * Create the group_id /G1/G2/G3. Note that /G1 and /G1/G2 do not exist yet. This call would cause an error
- * if we did not use the previously created property list.
+ * Create the group_id /G1/G2/G3. Note that /G1 and /G1/G2 do not exist yet. This call would cause
+ * an error if we did not use the previously created property list.
*/
- group_id = H5
- .H5Gcreate(file_id, "/G1/G2/G3", gcpl_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ group_id = H5.H5Gcreate(file_id, "/G1/G2/G3", gcpl_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
// Print all the objects in the file_ids to show that intermediate group_ids have been created.
System.out.println("Objects in the file_id:");
// H5O_iterate_opdata_t iter_data = null;
H5O_iterate_opdata_t iter_data = new H5O_iter_data();
- H5O_iterate_t iter_cb = new H5O_iter_callback();
+ H5O_iterate_t iter_cb = new H5O_iter_callback();
- H5.H5Ovisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb, iter_data);
+ H5.H5Ovisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb,
+ iter_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -72,7 +74,8 @@ public class H5Ex_G_Intermediate {
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
try {
(new H5Ex_G_Intermediate()).CreateGroup();
}
@@ -83,9 +86,10 @@ public class H5Ex_G_Intermediate {
private class idata {
public String link_name = null;
- public int link_type = -1;
+ public int link_type = -1;
- idata(String name, int type) {
+ idata(String name, int type)
+ {
this.link_name = name;
this.link_type = type;
}
@@ -96,9 +100,10 @@ public class H5Ex_G_Intermediate {
}
private class H5O_iter_callback implements H5O_iterate_t {
- public int callback(long group, String name, H5O_info_t info, H5O_iterate_opdata_t op_data) {
+ public int callback(long group, String name, H5O_info_t info, H5O_iterate_opdata_t op_data)
+ {
idata id = new idata(name, info.type);
- ((H5O_iter_data) op_data).iterdata.add(id);
+ ((H5O_iter_data)op_data).iterdata.add(id);
System.out.print("/"); /* Print root group in object path */
@@ -118,5 +123,4 @@ public class H5Ex_G_Intermediate {
return 0;
}
}
-
}
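
[Editor's note] A sketch of the intermediate-group trick: a link creation property list lets one H5Gcreate call build /G1 and /G1/G2 on the way to /G1/G2/G3, then H5Ovisit confirms they exist. Passing an empty anonymous op_data (rather than the list-collecting class the example uses) is an assumption for brevity.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.callbacks.H5O_iterate_opdata_t;
import hdf.hdf5lib.callbacks.H5O_iterate_t;
import hdf.hdf5lib.structs.H5O_info_t;

public class IntermediateSketch {
    public static void main(String[] args) throws Exception {
        long file = H5.H5Fcreate("sketch_inter.h5", HDF5Constants.H5F_ACC_TRUNC,
                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        // The link creation property list can auto-create missing parent groups.
        long lcpl = H5.H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
        H5.H5Pset_create_intermediate_group(lcpl, true);
        long group = H5.H5Gcreate(file, "/G1/G2/G3", lcpl, HDF5Constants.H5P_DEFAULT,
                                  HDF5Constants.H5P_DEFAULT);

        // Visit every object; /G1 and /G1/G2 were created as a side effect.
        H5O_iterate_t cb = new H5O_iterate_t() {
            public int callback(long g, String name, H5O_info_t info, H5O_iterate_opdata_t op) {
                System.out.println("/" + (".".equals(name) ? "" : name)); // root visits as "."
                return 0; // keep iterating
            }
        };
        H5.H5Ovisit(file, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, cb,
                    new H5O_iterate_opdata_t() {}); // op_data is a marker interface

        H5.H5Gclose(group);
        H5.H5Pclose(lcpl);
        H5.H5Fclose(file);
    }
}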
diff --git a/java/examples/groups/H5Ex_G_Iterate.java b/java/examples/groups/H5Ex_G_Iterate.java
index 6caecac..24cbb0a 100644
--- a/java/examples/groups/H5Ex_G_Iterate.java
+++ b/java/examples/groups/H5Ex_G_Iterate.java
@@ -16,47 +16,43 @@
************************************************************/
package examples.groups;
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-import hdf.hdf5lib.structs.H5O_token_t;
-
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5O_token_t;
+
public class H5Ex_G_Iterate {
- private static String FILENAME = "groups/h5ex_g_iterate.h5";
+ private static String FILENAME = "groups/h5ex_g_iterate.h5";
private static String DATASETNAME = "/";
enum H5O_type {
- H5O_TYPE_UNKNOWN(-1), // Unknown object type
- H5O_TYPE_GROUP(0), // Object is a group
- H5O_TYPE_DATASET(1), // Object is a dataset
+ H5O_TYPE_UNKNOWN(-1), // Unknown object type
+ H5O_TYPE_GROUP(0), // Object is a group
+ H5O_TYPE_DATASET(1), // Object is a dataset
H5O_TYPE_NAMED_DATATYPE(2), // Object is a named data type
- H5O_TYPE_NTYPES(3); // Number of different object types
+ H5O_TYPE_NTYPES(3); // Number of different object types
private static final Map<Integer, H5O_type> lookup = new HashMap<Integer, H5O_type>();
- static {
+ static
+ {
for (H5O_type s : EnumSet.allOf(H5O_type.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5O_type(int layout_type) {
- this.code = layout_type;
- }
+ H5O_type(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5O_type get(int code) {
- return lookup.get(code);
- }
+ public static H5O_type get(int code) { return lookup.get(code); }
}
- private static void do_iterate() {
+ private static void do_iterate()
+ {
long file_id = HDF5Constants.H5I_INVALID_HID;
// Open a file using default properties.
@@ -71,12 +67,13 @@ public class H5Ex_G_Iterate {
System.out.println("Objects in root group:");
try {
if (file_id >= 0) {
- int count = (int) H5.H5Gn_members(file_id, DATASETNAME);
- String[] oname = new String[count];
- int[] otype = new int[count];
- int[] ltype = new int[count];
+ int count = (int)H5.H5Gn_members(file_id, DATASETNAME);
+ String[] oname = new String[count];
+ int[] otype = new int[count];
+ int[] ltype = new int[count];
H5O_token_t[] otokens = new H5O_token_t[count];
- H5.H5Gget_obj_info_all(file_id, DATASETNAME, oname, otype, ltype, otokens, HDF5Constants.H5_INDEX_NAME);
+ H5.H5Gget_obj_info_all(file_id, DATASETNAME, oname, otype, ltype, otokens,
+ HDF5Constants.H5_INDEX_NAME);
// Get type of the object and display its name and type.
for (int indx = 0; indx < otype.length; indx++) {
@@ -110,8 +107,5 @@ public class H5Ex_G_Iterate {
}
}
- public static void main(String[] args) {
- H5Ex_G_Iterate.do_iterate();
- }
-
+ public static void main(String[] args) { H5Ex_G_Iterate.do_iterate(); }
}
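
[Editor's note] The non-callback way to enumerate a group, as reformatted above: size the parallel arrays with H5Gn_members, then fill them in one H5Gget_obj_info_all call. This sketch assumes the example's data file is present at the same relative path.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.structs.H5O_token_t;

public class IterateSketch {
    public static void main(String[] args) throws Exception {
        long file = H5.H5Fopen("groups/h5ex_g_iterate.h5", HDF5Constants.H5F_ACC_RDONLY,
                               HDF5Constants.H5P_DEFAULT);
        int count = (int)H5.H5Gn_members(file, "/");
        String[] names = new String[count];
        int[] otypes = new int[count];
        int[] ltypes = new int[count];
        H5O_token_t[] tokens = new H5O_token_t[count];
        // One call fills parallel arrays describing every member of the group.
        H5.H5Gget_obj_info_all(file, "/", names, otypes, ltypes, tokens,
                               HDF5Constants.H5_INDEX_NAME);
        for (int i = 0; i < count; i++)
            System.out.println(names[i] + " (type code " + otypes[i] + ")");
        H5.H5Fclose(file);
    }
}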
diff --git a/java/examples/groups/H5Ex_G_Phase.java b/java/examples/groups/H5Ex_G_Phase.java
index 67a2f53..7a3fba3 100644
--- a/java/examples/groups/H5Ex_G_Phase.java
+++ b/java/examples/groups/H5Ex_G_Phase.java
@@ -16,19 +16,19 @@
************************************************************/
package examples.groups;
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-import hdf.hdf5lib.structs.H5G_info_t;
-
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+
public class H5Ex_G_Phase {
- private static String FILE = "H5Ex_G_Phase.h5";
- private static int MAX_GROUPS = 7;
+ private static String FILE = "H5Ex_G_Phase.h5";
+ private static int MAX_GROUPS = 7;
private static int MAX_COMPACT = 5;
- private static int MIN_DENSE = 3;
+ private static int MIN_DENSE = 3;
enum H5G_storage {
H5G_STORAGE_TYPE_UNKNOWN(-1),
@@ -38,42 +38,39 @@ public class H5Ex_G_Phase {
private static final Map<Integer, H5G_storage> lookup = new HashMap<Integer, H5G_storage>();
- static {
+ static
+ {
for (H5G_storage s : EnumSet.allOf(H5G_storage.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5G_storage(int layout_type) {
- this.code = layout_type;
- }
+ H5G_storage(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5G_storage get(int code) {
- return lookup.get(code);
- }
+ public static H5G_storage get(int code) { return lookup.get(code); }
}
- private static void CreateGroup() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long group_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateGroup()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
long subgroup_id = HDF5Constants.H5I_INVALID_HID;
- long fapl_id = HDF5Constants.H5I_INVALID_HID;
- long gcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long fapl_id = HDF5Constants.H5I_INVALID_HID;
+ long gcpl_id = HDF5Constants.H5I_INVALID_HID;
H5G_info_t ginfo;
String name = "G0"; // Name of subgroup_id
int i;
- // Set file access property list to allow the latest file format.This will allow the library to create new
- // format groups.
+ // Set file access property list to allow the latest file format.This will allow the library to create
+ // new format groups.
try {
fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
if (fapl_id >= 0)
- H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST, HDF5Constants.H5F_LIBVER_LATEST);
+ H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST,
+ HDF5Constants.H5F_LIBVER_LATEST);
}
catch (Exception e) {
e.printStackTrace();
@@ -101,21 +98,23 @@ public class H5Ex_G_Phase {
// Create primary group.
try {
if ((file_id >= 0) && (gcpl_id >= 0))
- group_id = H5.H5Gcreate(file_id, name, HDF5Constants.H5P_DEFAULT, gcpl_id, HDF5Constants.H5P_DEFAULT);
+ group_id = H5.H5Gcreate(file_id, name, HDF5Constants.H5P_DEFAULT, gcpl_id,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
- // Add subgroups to "group" one at a time, print the storage type for "group" after each subgroup is created.
+ // Add subgroups to "group" one at a time, print the storage type for "group" after each subgroup is
+ // created.
for (i = 1; i <= MAX_GROUPS; i++) {
// Define the subgroup name and create the subgroup.
- char append = (char) (((char) i) + '0');
- name = name + append; /* G1, G2, G3 etc. */
+ char append = (char)(((char)i) + '0');
+ name = name + append; /* G1, G2, G3 etc. */
try {
if (group_id >= 0) {
- subgroup_id = H5.H5Gcreate(group_id, name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ subgroup_id = H5.H5Gcreate(group_id, name, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
H5.H5Gclose(subgroup_id);
}
}
@@ -127,7 +126,8 @@ public class H5Ex_G_Phase {
try {
if (group_id >= 0) {
ginfo = H5.H5Gget_info(group_id);
- System.out.print(ginfo.nlinks + " Group" + (ginfo.nlinks == 1 ? " " : "s") + ": Storage type is ");
+ System.out.print(ginfo.nlinks + " Group" + (ginfo.nlinks == 1 ? " " : "s") +
+ ": Storage type is ");
switch (H5G_storage.get(ginfo.storage_type)) {
case H5G_STORAGE_TYPE_COMPACT:
System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
@@ -169,7 +169,8 @@ public class H5Ex_G_Phase {
try {
if (group_id >= 0) {
ginfo = H5.H5Gget_info(group_id);
- System.out.print(ginfo.nlinks + " Group" + (ginfo.nlinks == 1 ? " " : "s") + ": Storage type is ");
+ System.out.print(ginfo.nlinks + " Group" + (ginfo.nlinks == 1 ? " " : "s") +
+ ": Storage type is ");
switch (H5G_storage.get(ginfo.storage_type)) {
case H5G_STORAGE_TYPE_COMPACT:
System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
@@ -228,11 +229,7 @@ public class H5Ex_G_Phase {
catch (Exception e) {
e.printStackTrace();
}
-
- }
-
- public static void main(String[] args) {
- H5Ex_G_Phase.CreateGroup();
}
+ public static void main(String[] args) { H5Ex_G_Phase.CreateGroup(); }
}
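
[Editor's note] A sketch of the compact/dense phase change the MAX_COMPACT and MIN_DENSE constants above configure. The H5Pset_link_phase_change call is not visible in these hunks; it is assumed here as the wrapper call those constants feed, so treat this as a sketch rather than a quote of the example.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.structs.H5G_info_t;

public class PhaseSketch {
    public static void main(String[] args) throws Exception {
        long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
        H5.H5Pset_libver_bounds(fapl, HDF5Constants.H5F_LIBVER_LATEST,
                                HDF5Constants.H5F_LIBVER_LATEST);
        long file = H5.H5Fcreate("sketch_phase.h5", HDF5Constants.H5F_ACC_TRUNC,
                                 HDF5Constants.H5P_DEFAULT, fapl);
        // Compact storage up to 5 links; convert to dense, reverting below 3.
        long gcpl = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
        H5.H5Pset_link_phase_change(gcpl, 5, 3);
        long group = H5.H5Gcreate(file, "G0", HDF5Constants.H5P_DEFAULT, gcpl,
                                  HDF5Constants.H5P_DEFAULT);

        for (int i = 1; i <= 7; i++) {
            H5.H5Gclose(H5.H5Gcreate(group, "G" + i, HDF5Constants.H5P_DEFAULT,
                                     HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT));
            H5G_info_t ginfo = H5.H5Gget_info(group);
            System.out.println(ginfo.nlinks + " links, storage type code " +
                               ginfo.storage_type); // compact until the 6th link
        }

        H5.H5Gclose(group);
        H5.H5Pclose(gcpl);
        H5.H5Pclose(fapl);
        H5.H5Fclose(file);
    }
}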
diff --git a/java/examples/groups/H5Ex_G_Traverse.java b/java/examples/groups/H5Ex_G_Traverse.java
index c5b6373..cd60f6b 100644
--- a/java/examples/groups/H5Ex_G_Traverse.java
+++ b/java/examples/groups/H5Ex_G_Traverse.java
@@ -24,10 +24,11 @@ package examples.groups;
import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
-import hdf.hdf5lib.callbacks.H5L_iterate_t;
import hdf.hdf5lib.callbacks.H5L_iterate_opdata_t;
+import hdf.hdf5lib.callbacks.H5L_iterate_t;
import hdf.hdf5lib.structs.H5L_info_t;
import hdf.hdf5lib.structs.H5O_info_t;
+
import examples.groups.H5Ex_G_Iterate.H5O_type;
class opdata implements H5L_iterate_opdata_t {
@@ -38,10 +39,11 @@ class opdata implements H5L_iterate_opdata_t {
public class H5Ex_G_Traverse {
- private static String FILE = "h5ex_g_traverse.h5";
+ private static String FILE = "h5ex_g_traverse.h5";
public static H5L_iterate_t iter_cb = new H5L_iter_callbackT();
- private static void OpenGroup() {
+ private static void OpenGroup()
+ {
long file_id = HDF5Constants.H5I_INVALID_HID;
H5O_info_t infobuf;
opdata od = new opdata();
@@ -50,9 +52,9 @@ public class H5Ex_G_Traverse {
try {
file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
if (file_id >= 0) {
- infobuf = H5.H5Oget_info(file_id);
- od.recurs = 0;
- od.prev = null;
+ infobuf = H5.H5Oget_info(file_id);
+ od.recurs = 0;
+ od.prev = null;
od.obj_token = infobuf.token;
}
}
@@ -64,7 +66,8 @@ public class H5Ex_G_Traverse {
try {
System.out.println("/ {");
// H5L_iterate_t iter_cb = new H5L_iter_callbackT();
- H5.H5Literate(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, 0L, iter_cb, od);
+ H5.H5Literate(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, 0L, iter_cb,
+ od);
System.out.println("}");
}
catch (Exception e) {
@@ -81,18 +84,17 @@ public class H5Ex_G_Traverse {
}
}
- public static void main(String[] args) {
- H5Ex_G_Traverse.OpenGroup();
- }
+ public static void main(String[] args) { H5Ex_G_Traverse.OpenGroup(); }
}
class H5L_iter_callbackT implements H5L_iterate_t {
- public int callback(long group, String name, H5L_info_t info, H5L_iterate_opdata_t op_data) {
+ public int callback(long group, String name, H5L_info_t info, H5L_iterate_opdata_t op_data)
+ {
H5O_info_t infobuf;
int return_val = 0;
- opdata od = (opdata) op_data; // Type conversion
- int spaces = 2 * (od.recurs + 1); // Number of white spaces to prepend to output.
+ opdata od = (opdata)op_data; // Type conversion
+ int spaces = 2 * (od.recurs + 1); // Number of white spaces to prepend to output.
// Get type of the object and display its name and type.
// The name of the object is passed to this function by the Library.
@@ -123,13 +125,14 @@ class H5L_iter_callbackT implements H5L_iterate_t {
// recursive iteration on the discovered
// group. The new opdata is given a pointer to the
// current one.
- opdata nextod = new opdata();
- nextod.recurs = od.recurs + 1;
- nextod.prev = od;
- nextod.obj_token = infobuf.token;
+ opdata nextod = new opdata();
+ nextod.recurs = od.recurs + 1;
+ nextod.prev = od;
+ nextod.obj_token = infobuf.token;
H5L_iterate_t iter_cb2 = new H5L_iter_callbackT();
- return_val = H5.H5Literate_by_name(group, name, HDF5Constants.H5_INDEX_NAME,
- HDF5Constants.H5_ITER_NATIVE, 0L, iter_cb2, nextod, HDF5Constants.H5P_DEFAULT);
+ return_val = H5.H5Literate_by_name(group, name, HDF5Constants.H5_INDEX_NAME,
+ HDF5Constants.H5_ITER_NATIVE, 0L, iter_cb2, nextod,
+ HDF5Constants.H5P_DEFAULT);
}
for (int i = 0; i < spaces; i++)
System.out.print(" ");
@@ -152,7 +155,8 @@ class H5L_iter_callbackT implements H5L_iterate_t {
return return_val;
}
- public boolean group_check(opdata od, H5O_token_t target_token) {
+ public boolean group_check(opdata od, H5O_token_t target_token)
+ {
if (od.obj_token.equals(target_token))
return true; // Object tokens match
else if (od.recurs == 0)
@@ -160,5 +164,4 @@ class H5L_iter_callbackT implements H5L_iterate_t {
else
return group_check(od.prev, target_token); // Recursively examine the next node
}
-
}
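
[Editor's note] A one-level version of the traversal above. H5Literate walks a single group; the full example recurses from inside the callback and compares H5O_token_t values up the opdata chain to break link cycles. The empty anonymous op_data is an assumption for brevity.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.callbacks.H5L_iterate_opdata_t;
import hdf.hdf5lib.callbacks.H5L_iterate_t;
import hdf.hdf5lib.structs.H5L_info_t;

public class LiterateSketch {
    public static void main(String[] args) throws Exception {
        long file = H5.H5Fopen("h5ex_g_traverse.h5", HDF5Constants.H5F_ACC_RDONLY,
                               HDF5Constants.H5P_DEFAULT);
        // Callback is invoked once per link in the root group, in native order.
        H5L_iterate_t cb = new H5L_iterate_t() {
            public int callback(long g, String name, H5L_info_t info, H5L_iterate_opdata_t op) {
                System.out.println("  " + name);
                return 0;
            }
        };
        System.out.println("/ {");
        H5.H5Literate(file, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, 0L,
                      cb, new H5L_iterate_opdata_t() {});
        System.out.println("}");
        H5.H5Fclose(file);
    }
}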
diff --git a/java/examples/groups/H5Ex_G_Visit.java b/java/examples/groups/H5Ex_G_Visit.java
index 790be80..d14ded6 100644
--- a/java/examples/groups/H5Ex_G_Visit.java
+++ b/java/examples/groups/H5Ex_G_Visit.java
@@ -20,22 +20,23 @@
************************************************************/
package examples.groups;
+import java.util.ArrayList;
+
import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
-import hdf.hdf5lib.callbacks.H5L_iterate_t;
import hdf.hdf5lib.callbacks.H5L_iterate_opdata_t;
-import hdf.hdf5lib.callbacks.H5O_iterate_t;
+import hdf.hdf5lib.callbacks.H5L_iterate_t;
import hdf.hdf5lib.callbacks.H5O_iterate_opdata_t;
+import hdf.hdf5lib.callbacks.H5O_iterate_t;
import hdf.hdf5lib.structs.H5L_info_t;
import hdf.hdf5lib.structs.H5O_info_t;
-import java.util.ArrayList;
-
public class H5Ex_G_Visit {
private static String FILE = "groups/h5ex_g_visit.h5";
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
try {
(new H5Ex_G_Visit()).VisitGroup();
}
@@ -44,7 +45,8 @@ public class H5Ex_G_Visit {
}
}
- private void VisitGroup() throws Exception {
+ private void VisitGroup() throws Exception
+ {
long file_id = HDF5Constants.H5I_INVALID_HID;
@@ -55,15 +57,16 @@ public class H5Ex_G_Visit {
// Begin iteration using H5Ovisit
System.out.println("Objects in the file:");
H5O_iterate_opdata_t iter_data = new H5O_iter_data();
- H5O_iterate_t iter_cb = new H5O_iter_callback();
- H5.H5Ovisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb, iter_data);
+ H5O_iterate_t iter_cb = new H5O_iter_callback();
+ H5.H5Ovisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb,
+ iter_data);
System.out.println();
// Repeat the same process using H5Lvisit
H5L_iterate_opdata_t iter_data2 = new H5L_iter_data();
- H5L_iterate_t iter_cb2 = new H5L_iter_callback();
+ H5L_iterate_t iter_cb2 = new H5L_iter_callback();
System.out.println("Links in the file:");
- H5.H5Lvisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb2, iter_data2);
-
+ H5.H5Lvisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb2,
+ iter_data2);
}
catch (Exception e) {
e.printStackTrace();
@@ -76,15 +79,16 @@ public class H5Ex_G_Visit {
}
/************************************************************
- * Operator function for H5Lvisit. This function simply retrieves the info for the object the current link points
- * to, and calls the operator function for H5Ovisit.
+ * Operator function for H5Lvisit. This function simply retrieves the info for the object the current link
+ *points to, and calls the operator function for H5Ovisit.
************************************************************/
private class idata {
public String link_name = null;
- public int link_type = -1;
+ public int link_type = -1;
- idata(String name, int type) {
+ idata(String name, int type)
+ {
this.link_name = name;
this.link_type = type;
}
@@ -95,20 +99,21 @@ public class H5Ex_G_Visit {
}
private class H5L_iter_callback implements H5L_iterate_t {
- public int callback(long group, String name, H5L_info_t info, H5L_iterate_opdata_t op_data) {
+ public int callback(long group, String name, H5L_info_t info, H5L_iterate_opdata_t op_data)
+ {
idata id = new idata(name, info.type);
- ((H5L_iter_data) op_data).iterdata.add(id);
+ ((H5L_iter_data)op_data).iterdata.add(id);
H5O_info_t infobuf;
int ret = 0;
try {
- // Get type of the object and display its name and type. The name of the object is passed to this
- // function by the Library.
- infobuf = H5.H5Oget_info_by_name(group, name, HDF5Constants.H5P_DEFAULT);
+ // Get type of the object and display its name and type. The name of the object is passed to
+ // this function by the Library.
+ infobuf = H5.H5Oget_info_by_name(group, name, HDF5Constants.H5P_DEFAULT);
H5O_iterate_t iter_cbO = new H5O_iter_callback();
H5O_iterate_opdata_t iter_dataO = new H5O_iter_data();
- ret = iter_cbO.callback(group, name, infobuf, iter_dataO);
+ ret = iter_cbO.callback(group, name, infobuf, iter_dataO);
}
catch (Exception e) {
e.printStackTrace();
@@ -123,9 +128,10 @@ public class H5Ex_G_Visit {
}
private class H5O_iter_callback implements H5O_iterate_t {
- public int callback(long group, String name, H5O_info_t info, H5O_iterate_opdata_t op_data) {
+ public int callback(long group, String name, H5O_info_t info, H5O_iterate_opdata_t op_data)
+ {
idata id = new idata(name, info.type);
- ((H5O_iter_data) op_data).iterdata.add(id);
+ ((H5O_iter_data)op_data).iterdata.add(id);
System.out.print("/"); /* Print root group in object path */
@@ -145,5 +151,4 @@ public class H5Ex_G_Visit {
return 0;
}
}
-
}
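
[Editor's note] The op_data pattern this example leans on, in miniature: the opdata interfaces carry no methods, so user state rides through the native iteration as an implementing class and is cast back inside the callback, exactly as the ((H5L_iter_data)op_data) cast above does. This sketch collects link names via H5Lvisit; the data file path is the example's.

import java.util.ArrayList;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.callbacks.H5L_iterate_opdata_t;
import hdf.hdf5lib.callbacks.H5L_iterate_t;
import hdf.hdf5lib.structs.H5L_info_t;

public class VisitSketch {
    // op_data is an opaque marker interface; implement it to carry state through.
    static class LinkList implements H5L_iterate_opdata_t {
        ArrayList<String> names = new ArrayList<>();
    }

    public static void main(String[] args) throws Exception {
        long file = H5.H5Fopen("groups/h5ex_g_visit.h5", HDF5Constants.H5F_ACC_RDONLY,
                               HDF5Constants.H5P_DEFAULT);
        LinkList links = new LinkList();
        H5L_iterate_t cb = new H5L_iterate_t() {
            public int callback(long g, String name, H5L_info_t info, H5L_iterate_opdata_t op) {
                ((LinkList)op).names.add(name); // cast back, as the example does
                return 0;
            }
        };
        H5.H5Lvisit(file, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, cb, links);
        System.out.println(links.names.size() + " links in the file:");
        for (String n : links.names)
            System.out.println("/" + n);
        H5.H5Fclose(file);
    }
}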
diff --git a/java/examples/intro/CMakeLists.txt b/java/examples/intro/CMakeLists.txt
index 867740b..a5e2a67 100644
--- a/java/examples/intro/CMakeLists.txt
+++ b/java/examples/intro/CMakeLists.txt
@@ -36,6 +36,13 @@ foreach (example ${HDF_JAVA_EXAMPLES})
# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
+
+ #-----------------------------------------------------------------------------
+ # Add Target to clang-format
+ #-----------------------------------------------------------------------------
+ if (HDF5_ENABLE_FORMATTERS)
+ clang_format (HDF5_JAVA_${example}_SRC_FORMAT ${example}.java)
+ endif ()
endforeach ()
set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
diff --git a/java/examples/intro/H5_CreateAttribute.java b/java/examples/intro/H5_CreateAttribute.java
index 949a770..22ecdae 100644
--- a/java/examples/intro/H5_CreateAttribute.java
+++ b/java/examples/intro/H5_CreateAttribute.java
@@ -20,25 +20,26 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5_CreateAttribute {
- private static String FILENAME = "H5_CreateAttribute.h5";
- private static String DATASETNAME = "dset";
- private static final int DIM_X = 4;
- private static final int DIM_Y = 6;
+ private static String FILENAME = "H5_CreateAttribute.h5";
+ private static String DATASETNAME = "dset";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
private static String DATASETATTRIBUTE = "Units";
- private static void CreateDatasetAttribute() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDatasetAttribute()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims1 = { DIM_X, DIM_Y };
- long[] dims = { 2 };
- int[] attr_data = { 100, 200 };
+ long[] dims1 = {DIM_X, DIM_Y};
+ long[] dims = {2};
+ int[] attr_data = {100, 200};
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -55,8 +56,9 @@ public class H5_CreateAttribute {
// Create the dataset.
try {
if ((file_id >= 0) && (dataspace_id >= 0))
- dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE,
+ dataspace_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -82,8 +84,9 @@ public class H5_CreateAttribute {
// Create a dataset attribute.
try {
if ((dataset_id >= 0) && (dataspace_id >= 0))
- attribute_id = H5.H5Acreate(dataset_id, DATASETATTRIBUTE, HDF5Constants.H5T_STD_I32BE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ attribute_id =
+ H5.H5Acreate(dataset_id, DATASETATTRIBUTE, HDF5Constants.H5T_STD_I32BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -135,8 +138,5 @@ public class H5_CreateAttribute {
}
}
- public static void main(String[] args) {
- H5_CreateAttribute.CreateDatasetAttribute();
- }
-
+ public static void main(String[] args) { H5_CreateAttribute.CreateDatasetAttribute(); }
}
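
[Editor's note] A minimal sketch of attaching an attribute to a dataset, following the hunks above. H5Screate_simple, H5Awrite, and H5Aclose are not shown in this diff but are the standard wrapper calls the example uses around them; names and dimensions mirror the constants above.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class AttributeSketch {
    public static void main(String[] args) throws Exception {
        long file = H5.H5Fcreate("sketch_attr.h5", HDF5Constants.H5F_ACC_TRUNC,
                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        long dspace = H5.H5Screate_simple(2, new long[] {4, 6}, null);
        long dset = H5.H5Dcreate(file, "/dset", HDF5Constants.H5T_STD_I32BE, dspace,
                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
                                 HDF5Constants.H5P_DEFAULT);

        // Attributes get their own (small) dataspace, here a 2-element vector.
        long aspace = H5.H5Screate_simple(1, new long[] {2}, null);
        long attr = H5.H5Acreate(dset, "Units", HDF5Constants.H5T_STD_I32BE, aspace,
                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        H5.H5Awrite(attr, HDF5Constants.H5T_NATIVE_INT, new int[] {100, 200});

        H5.H5Aclose(attr);
        H5.H5Sclose(aspace);
        H5.H5Dclose(dset);
        H5.H5Sclose(dspace);
        H5.H5Fclose(file);
    }
}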
diff --git a/java/examples/intro/H5_CreateDataset.java b/java/examples/intro/H5_CreateDataset.java
index f938be2..20683e8 100644
--- a/java/examples/intro/H5_CreateDataset.java
+++ b/java/examples/intro/H5_CreateDataset.java
@@ -20,21 +20,22 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5_CreateDataset {
- private static String FILENAME = "H5_CreateDataset.h5";
+ private static String FILENAME = "H5_CreateDataset.h5";
private static String DATASETNAME = "dset";
- private static final int DIM_X = 4;
- private static final int DIM_Y = 6;
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -51,8 +52,9 @@ public class H5_CreateDataset {
// Create the dataset.
try {
if ((file_id >= 0) && (dataspace_id >= 0))
- dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE,
+ dataspace_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -84,11 +86,7 @@ public class H5_CreateDataset {
catch (Exception e) {
e.printStackTrace();
}
-
- }
-
- public static void main(String[] args) {
- H5_CreateDataset.CreateDataset();
}
+ public static void main(String[] args) { H5_CreateDataset.CreateDataset(); }
}
diff --git a/java/examples/intro/H5_CreateFile.java b/java/examples/intro/H5_CreateFile.java
index d48ba6c..4323581 100644
--- a/java/examples/intro/H5_CreateFile.java
+++ b/java/examples/intro/H5_CreateFile.java
@@ -22,13 +22,14 @@ import hdf.hdf5lib.HDF5Constants;
public class H5_CreateFile {
static final String FILENAME = "H5_CreateFile.h5";
- private static void CreateFile() {
+ private static void CreateFile()
+ {
long file_id = HDF5Constants.H5I_INVALID_HID;
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -42,11 +43,7 @@ public class H5_CreateFile {
catch (Exception e) {
e.printStackTrace();
}
-
- }
-
- public static void main(String[] args) {
- H5_CreateFile.CreateFile();
}
+ public static void main(String[] args) { H5_CreateFile.CreateFile(); }
}
diff --git a/java/examples/intro/H5_CreateGroup.java b/java/examples/intro/H5_CreateGroup.java
index c0bb954..a276cbd 100644
--- a/java/examples/intro/H5_CreateGroup.java
+++ b/java/examples/intro/H5_CreateGroup.java
@@ -20,17 +20,18 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5_CreateGroup {
- private static String FILENAME = "H5_CreateGroup.h5";
+ private static String FILENAME = "H5_CreateGroup.h5";
private static String GROUPNAME = "MyGroup";
- private static void CreateGroup() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateGroup()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long group_id = HDF5Constants.H5I_INVALID_HID;
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -39,8 +40,8 @@ public class H5_CreateGroup {
// Create a group in the file.
try {
if (file_id >= 0)
- group_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ group_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -63,11 +64,7 @@ public class H5_CreateGroup {
catch (Exception e) {
e.printStackTrace();
}
-
- }
-
- public static void main(String[] args) {
- H5_CreateGroup.CreateGroup();
}
+ public static void main(String[] args) { H5_CreateGroup.CreateGroup(); }
}
diff --git a/java/examples/intro/H5_CreateGroupAbsoluteRelative.java b/java/examples/intro/H5_CreateGroupAbsoluteRelative.java
index f2c6168..2fb9c3c 100644
--- a/java/examples/intro/H5_CreateGroupAbsoluteRelative.java
+++ b/java/examples/intro/H5_CreateGroupAbsoluteRelative.java
@@ -20,13 +20,14 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5_CreateGroupAbsoluteRelative {
- private static String FILENAME = "H5_CreateGroupAbsoluteRelative.h5";
- private static String GROUPNAME = "MyGroup";
+ private static String FILENAME = "H5_CreateGroupAbsoluteRelative.h5";
+ private static String GROUPNAME = "MyGroup";
private static String GROUPNAME_A = "GroupA";
private static String GROUPNAME_B = "GroupB";
- private static void CreateGroupAbsoluteAndRelative() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateGroupAbsoluteAndRelative()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long group1_id = HDF5Constants.H5I_INVALID_HID;
long group2_id = HDF5Constants.H5I_INVALID_HID;
long group3_id = HDF5Constants.H5I_INVALID_HID;
@@ -34,7 +35,7 @@ public class H5_CreateGroupAbsoluteRelative {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -44,7 +45,7 @@ public class H5_CreateGroupAbsoluteRelative {
try {
if (file_id >= 0)
group1_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -53,8 +54,9 @@ public class H5_CreateGroupAbsoluteRelative {
// Create group "Group_A" in group "MyGroup" using absolute name.
try {
if (file_id >= 0)
- group2_id = H5.H5Gcreate(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ group2_id =
+ H5.H5Gcreate(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -63,8 +65,8 @@ public class H5_CreateGroupAbsoluteRelative {
// Create group "Group_B" in group "MyGroup" using relative name.
try {
if (group1_id >= 0)
- group3_id = H5.H5Gcreate(group1_id, GROUPNAME_B, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ group3_id = H5.H5Gcreate(group1_id, GROUPNAME_B, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -105,11 +107,10 @@ public class H5_CreateGroupAbsoluteRelative {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5_CreateGroupAbsoluteRelative.CreateGroupAbsoluteAndRelative();
}
-
}
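
[Editor's note] The absolute-versus-relative naming this example contrasts, reduced to three calls: an absolute path is anchored at the file id, a relative one at an already-open group id. Names follow the example's constants.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class AbsRelSketch {
    public static void main(String[] args) throws Exception {
        long file = H5.H5Fcreate("sketch_absrel.h5", HDF5Constants.H5F_ACC_TRUNC,
                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        long g1 = H5.H5Gcreate(file, "/MyGroup", HDF5Constants.H5P_DEFAULT,
                               HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        // Absolute: anchored at the file id.
        long g2 = H5.H5Gcreate(file, "/MyGroup/GroupA", HDF5Constants.H5P_DEFAULT,
                               HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        // Relative: anchored at the open group id for /MyGroup.
        long g3 = H5.H5Gcreate(g1, "GroupB", HDF5Constants.H5P_DEFAULT,
                               HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        H5.H5Gclose(g3);
        H5.H5Gclose(g2);
        H5.H5Gclose(g1);
        H5.H5Fclose(file);
    }
}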
diff --git a/java/examples/intro/H5_CreateGroupDataset.java b/java/examples/intro/H5_CreateGroupDataset.java
index f1d1cba..1d7cf99 100644
--- a/java/examples/intro/H5_CreateGroupDataset.java
+++ b/java/examples/intro/H5_CreateGroupDataset.java
@@ -20,27 +20,28 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5_CreateGroupDataset {
- private static String FILENAME = "H5_CreateGroupDataset.h5";
- private static String GROUPNAME = "MyGroup";
- private static String GROUPNAME_A = "GroupA";
+ private static String FILENAME = "H5_CreateGroupDataset.h5";
+ private static String GROUPNAME = "MyGroup";
+ private static String GROUPNAME_A = "GroupA";
private static String DATASETNAME1 = "dset1";
private static String DATASETNAME2 = "dset2";
- private static final int DIM1_X = 3;
- private static final int DIM1_Y = 3;
- private static final int DIM2_X = 2;
- private static final int DIM2_Y = 10;
-
- private static void h5_crtgrpd() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long group_id = HDF5Constants.H5I_INVALID_HID;
- long group1_id = HDF5Constants.H5I_INVALID_HID;
- long group2_id = HDF5Constants.H5I_INVALID_HID;
+ private static final int DIM1_X = 3;
+ private static final int DIM1_Y = 3;
+ private static final int DIM2_X = 2;
+ private static final int DIM2_Y = 10;
+
+ private static void h5_crtgrpd()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long group1_id = HDF5Constants.H5I_INVALID_HID;
+ long group2_id = HDF5Constants.H5I_INVALID_HID;
int[][] dset1_data = new int[DIM1_X][DIM1_Y];
int[][] dset2_data = new int[DIM2_X][DIM2_Y];
- long[] dims1 = { DIM1_X, DIM1_Y };
- long[] dims2 = { DIM2_X, DIM2_Y };
+ long[] dims1 = {DIM1_X, DIM1_Y};
+ long[] dims2 = {DIM2_X, DIM2_Y};
// Initialize the first dataset.
for (int indx = 0; indx < DIM1_X; indx++)
@@ -55,15 +56,16 @@ public class H5_CreateGroupDataset {
// Create a file.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
// Create a group named "/MyGroup" in the file.
if (file_id >= 0) {
group1_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
// Create group "Group_A" in group "MyGroup" using absolute name.
if (group1_id >= 0) {
- group2_id = H5.H5Gcreate(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ group2_id =
+ H5.H5Gcreate(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
if (group2_id >= 0)
H5.H5Gclose(group2_id);
}
@@ -86,8 +88,9 @@ public class H5_CreateGroupDataset {
// Create the dataset in group "MyGroup".
try {
if ((file_id >= 0) && (dataspace_id >= 0))
- dataset_id = H5.H5Dcreate(file_id, "/" + GROUPNAME + "/" + DATASETNAME1, HDF5Constants.H5T_STD_I32BE,
- dataspace_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ dataset_id = H5.H5Dcreate(
+ file_id, "/" + GROUPNAME + "/" + DATASETNAME1, HDF5Constants.H5T_STD_I32BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -96,8 +99,8 @@ public class H5_CreateGroupDataset {
// Write the first dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset1_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset1_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -126,7 +129,8 @@ public class H5_CreateGroupDataset {
// Open an existing group of the specified file.
try {
if (file_id >= 0)
- group_id = H5.H5Gopen(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT);
+ group_id =
+ H5.H5Gopen(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -144,7 +148,8 @@ public class H5_CreateGroupDataset {
try {
if ((group_id >= 0) && (dataspace_id >= 0))
dataset_id = H5.H5Dcreate(group_id, DATASETNAME2, HDF5Constants.H5T_STD_I32BE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -153,8 +158,8 @@ public class H5_CreateGroupDataset {
// Write the second dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset2_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset2_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -197,8 +202,5 @@ public class H5_CreateGroupDataset {
}
}
- public static void main(String[] args) {
- H5_CreateGroupDataset.h5_crtgrpd();
- }
-
+ public static void main(String[] args) { H5_CreateGroupDataset.h5_crtgrpd(); }
}
diff --git a/java/examples/intro/H5_ReadWrite.java b/java/examples/intro/H5_ReadWrite.java
index 67e1ac5..379fe20 100644
--- a/java/examples/intro/H5_ReadWrite.java
+++ b/java/examples/intro/H5_ReadWrite.java
@@ -20,16 +20,17 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5_ReadWrite {
- private static String FILENAME = "H5_ReadWrite.h5";
+ private static String FILENAME = "H5_ReadWrite.h5";
private static String DATASETNAME = "dset";
- private static final int DIM_X = 4;
- private static final int DIM_Y = 6;
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
- private static void ReadWriteDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadWriteDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM_X, DIM_Y };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
int[][] dset_data = new int[DIM_X][DIM_Y];
// Initialize the dataset.
@@ -40,7 +41,7 @@ public class H5_ReadWrite {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -57,8 +58,9 @@ public class H5_ReadWrite {
// Create the dataset.
try {
if ((file_id >= 0) && (dataspace_id >= 0))
- dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE,
+ dataspace_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -67,8 +69,8 @@ public class H5_ReadWrite {
// Write the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -76,8 +78,8 @@ public class H5_ReadWrite {
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -102,8 +104,5 @@ public class H5_ReadWrite {
}
}
- public static void main(String[] args) {
- H5_ReadWrite.ReadWriteDataset();
- }
-
+ public static void main(String[] args) { H5_ReadWrite.ReadWriteDataset(); }
}
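
[Editor's note] Closing the section, a compact round-trip sketch of the write/read pair reformatted above: the dataset is stored big-endian (H5T_STD_I32BE) and accessed through the native int memory type, with the wrapper marshaling the int[][] in both directions. File name and printout are illustrative.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class ReadWriteSketch {
    public static void main(String[] args) throws Exception {
        int[][] out = new int[4][6];
        int[][] in  = new int[4][6];
        for (int r = 0; r < 4; r++)
            for (int c = 0; c < 6; c++)
                out[r][c] = r * 6 + c;

        long file = H5.H5Fcreate("sketch_rw.h5", HDF5Constants.H5F_ACC_TRUNC,
                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        long space = H5.H5Screate_simple(2, new long[] {4, 6}, null);
        long dset = H5.H5Dcreate(file, "/dset", HDF5Constants.H5T_STD_I32BE, space,
                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
                                 HDF5Constants.H5P_DEFAULT);
        H5.H5Dwrite(dset, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                    HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, out);
        H5.H5Dread(dset, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                   HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, in);
        System.out.println("in[3][5] = " + in[3][5]); // prints 23

        H5.H5Dclose(dset);
        H5.H5Sclose(space);
        H5.H5Fclose(file);
    }
}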