Diffstat (limited to 'java/examples/datatypes')
-rw-r--r--  java/examples/datatypes/CMakeLists.txt                       |   7
-rw-r--r--  java/examples/datatypes/H5Ex_T_Array.java                    |  63
-rw-r--r--  java/examples/datatypes/H5Ex_T_ArrayAttribute.java           |  67
-rw-r--r--  java/examples/datatypes/H5Ex_T_Bit.java                      |  51
-rw-r--r--  java/examples/datatypes/H5Ex_T_BitAttribute.java             |  52
-rw-r--r--  java/examples/datatypes/H5Ex_T_Commit.java                   | 100
-rw-r--r--  java/examples/datatypes/H5Ex_T_Compound.java                 | 152
-rw-r--r--  java/examples/datatypes/H5Ex_T_CompoundAttribute.java        | 154
-rw-r--r--  java/examples/datatypes/H5Ex_T_Float.java                    |  51
-rw-r--r--  java/examples/datatypes/H5Ex_T_FloatAttribute.java           |  54
-rw-r--r--  java/examples/datatypes/H5Ex_T_Integer.java                  |  47
-rw-r--r--  java/examples/datatypes/H5Ex_T_IntegerAttribute.java         |  48
-rw-r--r--  java/examples/datatypes/H5Ex_T_ObjectReference.java          | 144
-rw-r--r--  java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java | 160
-rw-r--r--  java/examples/datatypes/H5Ex_T_Opaque.java                   |  59
-rw-r--r--  java/examples/datatypes/H5Ex_T_OpaqueAttribute.java          |  59
-rw-r--r--  java/examples/datatypes/H5Ex_T_String.java                   |  71
-rw-r--r--  java/examples/datatypes/H5Ex_T_StringAttribute.java          |  73
-rw-r--r--  java/examples/datatypes/H5Ex_T_VLString.java                 |  50
19 files changed, 779 insertions(+), 683 deletions(-)
diff --git a/java/examples/datatypes/CMakeLists.txt b/java/examples/datatypes/CMakeLists.txt
index 6997b2e..4512221 100644
--- a/java/examples/datatypes/CMakeLists.txt
+++ b/java/examples/datatypes/CMakeLists.txt
@@ -47,6 +47,13 @@ foreach (example ${HDF_JAVA_EXAMPLES})
# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
+
+ #-----------------------------------------------------------------------------
+ # Add Target to clang-format
+ #-----------------------------------------------------------------------------
+ if (HDF5_ENABLE_FORMATTERS)
+ clang_format (HDF5_JAVA_${example}_SRC_FORMAT ${example}.java)
+ endif ()
endforeach ()
set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
diff --git a/java/examples/datatypes/H5Ex_T_Array.java b/java/examples/datatypes/H5Ex_T_Array.java
index 3939b38..4e9de2a 100644
--- a/java/examples/datatypes/H5Ex_T_Array.java
+++ b/java/examples/datatypes/H5Ex_T_Array.java
@@ -24,22 +24,23 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_Array {
- private static String FILENAME = "H5Ex_T_Array.h5";
+ private static String FILENAME = "H5Ex_T_Array.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM0 = 4;
- private static final int ADIM0 = 3;
- private static final int ADIM1 = 5;
- private static final int RANK = 1;
- private static final int NDIMS = 2;
-
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- long[] adims = { ADIM0, ADIM1 };
+ private static final int DIM0 = 4;
+ private static final int ADIM0 = 3;
+ private static final int ADIM1 = 5;
+ private static final int RANK = 1;
+ private static final int NDIMS = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ long[] adims = {ADIM0, ADIM1};
int[][][] dset_data = new int[DIM0][ADIM0][ADIM1];
// Initialize data. indx is the element in the dataspace, jndx and kndx the
@@ -52,7 +53,7 @@ public class H5Ex_T_Array {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -86,8 +87,9 @@ public class H5Ex_T_Array {
// Create the dataset.
try {
if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
- dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -97,7 +99,7 @@ public class H5Ex_T_Array {
try {
if ((dataset_id >= 0) && (memtype_id >= 0))
H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -147,16 +149,16 @@ public class H5Ex_T_Array {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- long[] adims = { ADIM0, ADIM1 };
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ long[] adims = {ADIM0, ADIM1};
int[][][] dset_data;
// Open an existing file.
@@ -196,7 +198,7 @@ public class H5Ex_T_Array {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset.
- dset_data = new int[(int) dims[0]][(int) (adims[0])][(int) (adims[1])];
+ dset_data = new int[(int)dims[0]][(int)(adims[0])][(int)(adims[1])];
// Create array datatypes for memory.
try {
@@ -210,7 +212,7 @@ public class H5Ex_T_Array {
try {
if ((dataset_id >= 0) && (memtype_id >= 0))
H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -264,10 +266,10 @@ public class H5Ex_T_Array {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_Array.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -275,5 +277,4 @@ public class H5Ex_T_Array {
// data using malloc().
H5Ex_T_Array.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_ArrayAttribute.java b/java/examples/datatypes/H5Ex_T_ArrayAttribute.java
index c4c4bc4..45b44c1 100644
--- a/java/examples/datatypes/H5Ex_T_ArrayAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_ArrayAttribute.java
@@ -24,24 +24,25 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_ArrayAttribute {
- private static String FILENAME = "H5Ex_T_ArrayAttribute.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_ArrayAttribute.h5";
+ private static String DATASETNAME = "DS1";
private static String ATTRIBUTENAME = "A1";
- private static final int DIM0 = 4;
- private static final int ADIM0 = 3;
- private static final int ADIM1 = 5;
- private static final int RANK = 1;
- private static final int NDIMS = 2;
-
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- long[] adims = { ADIM0, ADIM1 };
+ private static final int DIM0 = 4;
+ private static final int ADIM0 = 3;
+ private static final int ADIM1 = 5;
+ private static final int RANK = 1;
+ private static final int NDIMS = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ long[] adims = {ADIM0, ADIM1};
int[][][] dset_data = new int[DIM0][ADIM0][ADIM1];
// Initialize data. indx is the element in the dataspace, jndx and kndx the
@@ -54,7 +55,7 @@ public class H5Ex_T_ArrayAttribute {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -81,7 +82,8 @@ public class H5Ex_T_ArrayAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -103,7 +105,7 @@ public class H5Ex_T_ArrayAttribute {
try {
if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -170,17 +172,17 @@ public class H5Ex_T_ArrayAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- long[] adims = { ADIM0, ADIM1 };
+ long[] dims = {DIM0};
+ long[] adims = {ADIM0, ADIM1};
int[][][] dset_data;
// Open an existing file.
@@ -203,7 +205,7 @@ public class H5Ex_T_ArrayAttribute {
try {
if (dataset_id >= 0)
attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -229,7 +231,7 @@ public class H5Ex_T_ArrayAttribute {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset.
- dset_data = new int[(int) dims[0]][(int) (adims[0])][(int) (adims[1])];
+ dset_data = new int[(int)dims[0]][(int)(adims[0])][(int)(adims[1])];
// Create array datatypes for memory.
try {
@@ -304,10 +306,10 @@ public class H5Ex_T_ArrayAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_ArrayAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -315,5 +317,4 @@ public class H5Ex_T_ArrayAttribute {
// data using malloc().
H5Ex_T_ArrayAttribute.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_Bit.java b/java/examples/datatypes/H5Ex_T_Bit.java
index 45d4e8a..6d1a253 100644
--- a/java/examples/datatypes/H5Ex_T_Bit.java
+++ b/java/examples/datatypes/H5Ex_T_Bit.java
@@ -24,17 +24,18 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_Bit {
- private static String FILENAME = "H5Ex_T_Bit.h5";
+ private static String FILENAME = "H5Ex_T_Bit.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM0 = 4;
- private static final int DIM1 = 7;
- private static final int RANK = 2;
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
int[][] dset_data = new int[DIM0][DIM1];
// Initialize data.
@@ -42,15 +43,15 @@ public class H5Ex_T_Bit {
for (int jndx = 0; jndx < DIM1; jndx++) {
dset_data[indx][jndx] = 0;
dset_data[indx][jndx] |= (indx * jndx - jndx) & 0x03; /* Field "A" */
- dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */
- dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */
+ dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */
+ dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */
dset_data[indx][jndx] |= ((indx + jndx) & 0x03) << 6; /* Field "D" */
}
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -69,7 +70,8 @@ public class H5Ex_T_Bit {
try {
if ((file_id >= 0) && (dataspace_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_B8BE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -78,8 +80,8 @@ public class H5Ex_T_Bit {
// Write the bitfield data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -111,14 +113,14 @@ public class H5Ex_T_Bit {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
int[][] dset_data;
// Open an existing file.
@@ -157,13 +159,13 @@ public class H5Ex_T_Bit {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset.
- dset_data = new int[(int) dims[0]][(int) (dims[1])];
+ dset_data = new int[(int)dims[0]][(int)(dims[1])];
// Read data.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -209,10 +211,10 @@ public class H5Ex_T_Bit {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_Bit.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -220,5 +222,4 @@ public class H5Ex_T_Bit {
// data using malloc().
H5Ex_T_Bit.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_BitAttribute.java b/java/examples/datatypes/H5Ex_T_BitAttribute.java
index 9b33ca5..3ad643a 100644
--- a/java/examples/datatypes/H5Ex_T_BitAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_BitAttribute.java
@@ -24,19 +24,20 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_BitAttribute {
- private static String FILENAME = "H5Ex_T_BitAttribute.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_BitAttribute.h5";
+ private static String DATASETNAME = "DS1";
private static String ATTRIBUTENAME = "A1";
- private static final int DIM0 = 4;
- private static final int DIM1 = 7;
- private static final int RANK = 2;
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long[] dims = {DIM0, DIM1};
int[][] dset_data = new int[DIM0][DIM1];
// Initialize data.
@@ -44,15 +45,15 @@ public class H5Ex_T_BitAttribute {
for (int jndx = 0; jndx < DIM1; jndx++) {
dset_data[indx][jndx] = 0;
dset_data[indx][jndx] |= (indx * jndx - jndx) & 0x03; /* Field "A" */
- dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */
- dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */
+ dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */
+ dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */
dset_data[indx][jndx] |= ((indx + jndx) & 0x03) << 6; /* Field "D" */
}
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -63,7 +64,8 @@ public class H5Ex_T_BitAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -84,8 +86,9 @@ public class H5Ex_T_BitAttribute {
// Create the attribute and write the array data to it.
try {
if ((dataset_id >= 0) && (dataspace_id >= 0))
- attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_B8BE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ attribute_id =
+ H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_B8BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -134,15 +137,15 @@ public class H5Ex_T_BitAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long[] dims = {DIM0, DIM1};
int[][] dset_data;
// Open an existing file.
@@ -165,7 +168,7 @@ public class H5Ex_T_BitAttribute {
try {
if (dataset_id >= 0)
attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -190,7 +193,7 @@ public class H5Ex_T_BitAttribute {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset.
- dset_data = new int[(int) dims[0]][(int) (dims[1])];
+ dset_data = new int[(int)dims[0]][(int)(dims[1])];
// Read data.
try {
@@ -249,10 +252,10 @@ public class H5Ex_T_BitAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_BitAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -260,5 +263,4 @@ public class H5Ex_T_BitAttribute {
// data using malloc().
H5Ex_T_BitAttribute.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_Commit.java b/java/examples/datatypes/H5Ex_T_Commit.java
index 62db5ea..44586ef 100644
--- a/java/examples/datatypes/H5Ex_T_Commit.java
+++ b/java/examples/datatypes/H5Ex_T_Commit.java
@@ -21,77 +21,74 @@
package examples.datatypes;
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
public class H5Ex_T_Commit {
- private static String FILENAME = "H5Ex_T_Commit.h5";
- private static String DATATYPENAME = "Sensor_Type";
- protected static final int INTEGERSIZE = 4;
- protected static final int DOUBLESIZE = 8;
+ private static String FILENAME = "H5Ex_T_Commit.h5";
+ private static String DATATYPENAME = "Sensor_Type";
+ protected static final int INTEGERSIZE = 4;
+ protected static final int DOUBLESIZE = 8;
protected final static int MAXSTRINGSIZE = 80;
// Values for the various classes of datatypes
enum H5T_class {
- H5T_NO_CLASS(HDF5Constants.H5T_NO_CLASS), // error
- H5T_INTEGER(HDF5Constants.H5T_INTEGER), // integer types
- H5T_FLOAT(HDF5Constants.H5T_FLOAT), // floating-point types
- H5T_TIME(HDF5Constants.H5T_TIME), // date and time types
- H5T_STRING(HDF5Constants.H5T_STRING), // character string types
- H5T_BITFIELD(HDF5Constants.H5T_BITFIELD), // bit field types
- H5T_OPAQUE(HDF5Constants.H5T_OPAQUE), // opaque types
- H5T_COMPOUND(HDF5Constants.H5T_COMPOUND), // compound types
+ H5T_NO_CLASS(HDF5Constants.H5T_NO_CLASS), // error
+ H5T_INTEGER(HDF5Constants.H5T_INTEGER), // integer types
+ H5T_FLOAT(HDF5Constants.H5T_FLOAT), // floating-point types
+ H5T_TIME(HDF5Constants.H5T_TIME), // date and time types
+ H5T_STRING(HDF5Constants.H5T_STRING), // character string types
+ H5T_BITFIELD(HDF5Constants.H5T_BITFIELD), // bit field types
+ H5T_OPAQUE(HDF5Constants.H5T_OPAQUE), // opaque types
+ H5T_COMPOUND(HDF5Constants.H5T_COMPOUND), // compound types
H5T_REFERENCE(HDF5Constants.H5T_REFERENCE), // reference types
- H5T_ENUM(HDF5Constants.H5T_ENUM), // enumeration types
- H5T_VLEN(HDF5Constants.H5T_VLEN), // Variable-Length types
- H5T_ARRAY(HDF5Constants.H5T_ARRAY), // Array types
- H5T_NCLASSES(11); // this must be last
+ H5T_ENUM(HDF5Constants.H5T_ENUM), // enumeration types
+ H5T_VLEN(HDF5Constants.H5T_VLEN), // Variable-Length types
+ H5T_ARRAY(HDF5Constants.H5T_ARRAY), // Array types
+ H5T_NCLASSES(11); // this must be last
private static final Map<Long, H5T_class> lookup = new HashMap<Long, H5T_class>();
- static {
+ static
+ {
for (H5T_class s : EnumSet.allOf(H5T_class.class))
lookup.put(s.getCode(), s);
}
private long code;
- H5T_class(long layout_type) {
- this.code = layout_type;
- }
+ H5T_class(long layout_type) { this.code = layout_type; }
- public long getCode() {
- return this.code;
- }
+ public long getCode() { return this.code; }
- public static H5T_class get(long typeclass_id) {
- return lookup.get(typeclass_id);
- }
+ public static H5T_class get(long typeclass_id) { return lookup.get(typeclass_id); }
}
// The supporting Sensor_Datatype class.
private static class Sensor_Datatype {
static int numberMembers = 4;
- static int[] memberDims = { 1, 1, 1, 1 };
+ static int[] memberDims = {1, 1, 1, 1};
- String[] memberNames = { "Serial number", "Location", "Temperature (F)", "Pressure (inHg)" };
- long[] memberFileTypes = { HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1, HDF5Constants.H5T_IEEE_F64BE,
- HDF5Constants.H5T_IEEE_F64BE };
- static int[] memberStorage = { INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE };
+ String[] memberNames = {"Serial number", "Location", "Temperature (F)", "Pressure (inHg)"};
+ long[] memberFileTypes = {HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE};
+ static int[] memberStorage = {INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE};
// Data size is the storage size for the members not the object.
- static long getDataSize() {
+ static long getDataSize()
+ {
long data_size = 0;
for (int indx = 0; indx < numberMembers; indx++)
data_size += memberStorage[indx] * memberDims[indx];
return data_size;
}
- static int getOffset(int memberItem) {
+ static int getOffset(int memberItem)
+ {
int data_offset = 0;
for (int indx = 0; indx < memberItem; indx++)
data_offset += memberStorage[indx];
@@ -99,15 +96,16 @@ public class H5Ex_T_Commit {
}
}
- private static void CreateDataType() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long strtype_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataType()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
Sensor_Datatype datatypes = new Sensor_Datatype();
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -134,7 +132,8 @@ public class H5Ex_T_Commit {
long type_id = datatypes.memberFileTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
- H5.H5Tinsert(filetype_id, datatypes.memberNames[indx], Sensor_Datatype.getOffset(indx), type_id);
+ H5.H5Tinsert(filetype_id, datatypes.memberNames[indx], Sensor_Datatype.getOffset(indx),
+ type_id);
}
}
}
@@ -145,8 +144,8 @@ public class H5Ex_T_Commit {
// Commit the compound datatype to the file, creating a named datatype.
try {
if ((file_id >= 0) && (filetype_id >= 0))
- H5.H5Tcommit(file_id, DATATYPENAME, filetype_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ H5.H5Tcommit(file_id, DATATYPENAME, filetype_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -178,13 +177,13 @@ public class H5Ex_T_Commit {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataType() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataType()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long typeclass_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
// Open an existing file.
try {
@@ -247,10 +246,10 @@ public class H5Ex_T_Commit {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_Commit.CreateDataType();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -258,5 +257,4 @@ public class H5Ex_T_Commit {
// data using malloc().
H5Ex_T_Commit.ReadDataType();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_Compound.java b/java/examples/datatypes/H5Ex_T_Compound.java
index 8c83ebb..a78d0e3 100644
--- a/java/examples/datatypes/H5Ex_T_Compound.java
+++ b/java/examples/datatypes/H5Ex_T_Compound.java
@@ -20,49 +20,52 @@
package examples.datatypes;
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
public class H5Ex_T_Compound {
- private static String FILENAME = "H5Ex_T_Compound.h5";
- private static String DATASETNAME = "DS1";
- private static final int DIM0 = 4;
- private static final int RANK = 1;
- protected static final int INTEGERSIZE = 4;
- protected static final int DOUBLESIZE = 8;
+ private static String FILENAME = "H5Ex_T_Compound.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int RANK = 1;
+ protected static final int INTEGERSIZE = 4;
+ protected static final int DOUBLESIZE = 8;
protected final static int MAXSTRINGSIZE = 80;
static class Sensor_Datatype {
static int numberMembers = 4;
- static int[] memberDims = { 1, 1, 1, 1 };
+ static int[] memberDims = {1, 1, 1, 1};
- static String[] memberNames = { "Serial number", "Location", "Temperature (F)", "Pressure (inHg)" };
- static long[] memberMemTypes = { HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
- HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE };
- static long[] memberFileTypes = { HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
- HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE };
- static int[] memberStorage = { INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE };
+ static String[] memberNames = {"Serial number", "Location", "Temperature (F)", "Pressure (inHg)"};
+ static long[] memberMemTypes = {HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE};
+ static long[] memberFileTypes = {HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE};
+ static int[] memberStorage = {INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE};
// Data size is the storage size for the members.
- static long getTotalDataSize() {
+ static long getTotalDataSize()
+ {
long data_size = 0;
for (int indx = 0; indx < numberMembers; indx++)
data_size += memberStorage[indx] * memberDims[indx];
return DIM0 * data_size;
}
- static long getDataSize() {
+ static long getDataSize()
+ {
long data_size = 0;
for (int indx = 0; indx < numberMembers; indx++)
data_size += memberStorage[indx] * memberDims[indx];
return data_size;
}
- static int getOffset(int memberItem) {
+ static int getOffset(int memberItem)
+ {
int data_offset = 0;
for (int indx = 0; indx < memberItem; indx++)
data_offset += memberStorage[indx];
@@ -76,60 +79,63 @@ public class H5Ex_T_Compound {
public double temperature;
public double pressure;
- Sensor(int serial_no, String location, double temperature, double pressure) {
- this.serial_no = serial_no;
- this.location = location;
+ Sensor(int serial_no, String location, double temperature, double pressure)
+ {
+ this.serial_no = serial_no;
+ this.location = location;
this.temperature = temperature;
- this.pressure = pressure;
+ this.pressure = pressure;
}
- Sensor(ByteBuffer databuf, int dbposition) {
- readBuffer(databuf, dbposition);
- }
+ Sensor(ByteBuffer databuf, int dbposition) { readBuffer(databuf, dbposition); }
- void writeBuffer(ByteBuffer databuf, int dbposition) {
+ void writeBuffer(ByteBuffer databuf, int dbposition)
+ {
databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), serial_no);
byte[] temp_str = location.getBytes(Charset.forName("UTF-8"));
- int arraylen = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
+ int arraylen = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
for (int ndx = 0; ndx < arraylen; ndx++)
databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, temp_str[ndx]);
for (int ndx = arraylen; ndx < MAXSTRINGSIZE; ndx++)
- databuf.put(dbposition + Sensor_Datatype.getOffset(1) + arraylen, (byte) 0);
+ databuf.put(dbposition + Sensor_Datatype.getOffset(1) + arraylen, (byte)0);
databuf.putDouble(dbposition + Sensor_Datatype.getOffset(2), temperature);
databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), pressure);
}
- void readBuffer(ByteBuffer databuf, int dbposition) {
- this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
+ void readBuffer(ByteBuffer databuf, int dbposition)
+ {
+ this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
ByteBuffer stringbuf = databuf.duplicate();
stringbuf.position(dbposition + Sensor_Datatype.getOffset(1));
stringbuf.limit(dbposition + Sensor_Datatype.getOffset(1) + MAXSTRINGSIZE);
byte[] bytearr = new byte[stringbuf.remaining()];
stringbuf.get(bytearr);
- this.location = new String(bytearr, Charset.forName("UTF-8")).trim();
+ this.location = new String(bytearr, Charset.forName("UTF-8")).trim();
this.temperature = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(2));
- this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
+ this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
}
@Override
- public String toString() {
- return String.format("Serial number : " + serial_no + "%n" +
- "Location : " + location + "%n" +
- "Temperature (F) : " + temperature + "%n" +
- "Pressure (inHg) : " + pressure + "%n");
+ public String toString()
+ {
+ return String.format("Serial number : " + serial_no + "%n"
+ + "Location : " + location + "%n"
+ + "Temperature (F) : " + temperature + "%n"
+ + "Pressure (inHg) : " + pressure + "%n");
}
}
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long strtype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
Sensor[] object_data = new Sensor[DIM0];
- byte[] dset_data = null;
+ byte[] dset_data = null;
// Initialize data.
object_data[0] = new Sensor(1153, new String("Exterior (static)"), 53.23, 24.57);
@@ -140,7 +146,7 @@ public class H5Ex_T_Compound {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -164,8 +170,8 @@ public class H5Ex_T_Compound {
long type_id = Sensor_Datatype.memberMemTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
- H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
- type_id);
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
}
}
}
@@ -184,8 +190,8 @@ public class H5Ex_T_Compound {
long type_id = Sensor_Datatype.memberFileTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
- H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
- type_id);
+ H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
}
}
}
@@ -205,8 +211,9 @@ public class H5Ex_T_Compound {
// Create the dataset.
try {
if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
- dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -214,16 +221,16 @@ public class H5Ex_T_Compound {
// Write the compound data to the dataset.
// allocate memory for read buffer.
- dset_data = new byte[(int)dims[0] * (int)Sensor_Datatype.getDataSize()];
+ dset_data = new byte[(int)dims[0] * (int)Sensor_Datatype.getDataSize()];
ByteBuffer outBuf = ByteBuffer.wrap(dset_data);
outBuf.order(ByteOrder.nativeOrder());
- for (int indx = 0; indx < (int) dims[0]; indx++) {
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
object_data[indx].writeBuffer(outBuf, indx * (int)Sensor_Datatype.getDataSize());
}
try {
if ((dataset_id >= 0) && (memtype_id >= 0))
H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -281,16 +288,16 @@ public class H5Ex_T_Compound {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long strtype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
Sensor[] object_data2;
byte[] dset_data;
@@ -346,8 +353,8 @@ public class H5Ex_T_Compound {
long type_id = Sensor_Datatype.memberMemTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
- H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
- type_id);
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
}
}
}
@@ -356,19 +363,19 @@ public class H5Ex_T_Compound {
}
// allocate memory for read buffer.
- dset_data = new byte[(int) dims[0] * (int)Sensor_Datatype.getDataSize()];
+ dset_data = new byte[(int)dims[0] * (int)Sensor_Datatype.getDataSize()];
- object_data2 = new Sensor[(int) dims[0]];
+ object_data2 = new Sensor[(int)dims[0]];
// Read data.
try {
if ((dataset_id >= 0) && (memtype_id >= 0))
H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
ByteBuffer inBuf = ByteBuffer.wrap(dset_data);
inBuf.order(ByteOrder.nativeOrder());
- for (int indx = 0; indx < (int) dims[0]; indx++) {
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
object_data2[indx] = new Sensor(inBuf, indx * (int)Sensor_Datatype.getDataSize());
}
}
@@ -425,10 +432,10 @@ public class H5Ex_T_Compound {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_Compound.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -436,5 +443,4 @@ public class H5Ex_T_Compound {
// data using malloc().
H5Ex_T_Compound.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_CompoundAttribute.java b/java/examples/datatypes/H5Ex_T_CompoundAttribute.java
index 58d2fb7..f331a1e 100644
--- a/java/examples/datatypes/H5Ex_T_CompoundAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_CompoundAttribute.java
@@ -20,52 +20,55 @@
package examples.datatypes;
-import hdf.hdf5lib.H5;
-import hdf.hdf5lib.HDF5Constants;
-
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
public class H5Ex_T_CompoundAttribute {
- private static String FILENAME = "H5Ex_T_CompoundAttribute.h5";
- private static String DATASETNAME = "DS1";
- private static String ATTRIBUTENAME = "A1";
- private static final int DIM0 = 4;
- private static final int RANK = 1;
- protected static final int INTEGERSIZE = 4;
- protected static final int DOUBLESIZE = 8;
+ private static String FILENAME = "H5Ex_T_CompoundAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int RANK = 1;
+ protected static final int INTEGERSIZE = 4;
+ protected static final int DOUBLESIZE = 8;
protected final static int MAXSTRINGSIZE = 80;
// Using Java Externalization will add a two-byte object header in
// the stream, which needs to be called out in the datatypes.
static class Sensor_Datatype {
static int numberMembers = 4;
- static int[] memberDims = { 1, 1, 1, 1 };
+ static int[] memberDims = {1, 1, 1, 1};
- static String[] memberNames = { "Serial number", "Location", "Temperature (F)", "Pressure (inHg)" };
- static long[] memberMemTypes = { HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
- HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE };
- static long[] memberFileTypes = { HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
- HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE };
- static int[] memberStorage = { INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE };
+ static String[] memberNames = {"Serial number", "Location", "Temperature (F)", "Pressure (inHg)"};
+ static long[] memberMemTypes = {HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE};
+ static long[] memberFileTypes = {HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE};
+ static int[] memberStorage = {INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE};
// Data size is the storage size for the members not the object.
- static long getTotalDataSize() {
+ static long getTotalDataSize()
+ {
long data_size = 0;
for (int indx = 0; indx < numberMembers; indx++)
data_size += memberStorage[indx] * memberDims[indx];
return DIM0 * data_size;
}
- static long getDataSize() {
+ static long getDataSize()
+ {
long data_size = 0;
for (int indx = 0; indx < numberMembers; indx++)
data_size += memberStorage[indx] * memberDims[indx];
return data_size;
}
- static int getOffset(int memberItem) {
+ static int getOffset(int memberItem)
+ {
int data_offset = 0;
for (int indx = 0; indx < memberItem; indx++)
data_offset += memberStorage[indx];
@@ -79,61 +82,64 @@ public class H5Ex_T_CompoundAttribute {
public double temperature;
public double pressure;
- Sensor(int serial_no, String location, double temperature, double pressure) {
- this.serial_no = serial_no;
- this.location = location;
+ Sensor(int serial_no, String location, double temperature, double pressure)
+ {
+ this.serial_no = serial_no;
+ this.location = location;
this.temperature = temperature;
- this.pressure = pressure;
+ this.pressure = pressure;
}
- Sensor(ByteBuffer databuf, int dbposition) {
- readBuffer(databuf, dbposition);
- }
+ Sensor(ByteBuffer databuf, int dbposition) { readBuffer(databuf, dbposition); }
- void writeBuffer(ByteBuffer databuf, int dbposition) {
+ void writeBuffer(ByteBuffer databuf, int dbposition)
+ {
databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), serial_no);
byte[] temp_str = location.getBytes(Charset.forName("UTF-8"));
- int arraylen = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
+ int arraylen = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
for (int ndx = 0; ndx < arraylen; ndx++)
databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, temp_str[ndx]);
for (int ndx = arraylen; ndx < MAXSTRINGSIZE; ndx++)
- databuf.put(dbposition + Sensor_Datatype.getOffset(1) + arraylen, (byte) 0);
+ databuf.put(dbposition + Sensor_Datatype.getOffset(1) + arraylen, (byte)0);
databuf.putDouble(dbposition + Sensor_Datatype.getOffset(2), temperature);
databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), pressure);
}
- void readBuffer(ByteBuffer databuf, int dbposition) {
- this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
+ void readBuffer(ByteBuffer databuf, int dbposition)
+ {
+ this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
ByteBuffer stringbuf = databuf.duplicate();
stringbuf.position(dbposition + Sensor_Datatype.getOffset(1));
stringbuf.limit(dbposition + Sensor_Datatype.getOffset(1) + MAXSTRINGSIZE);
byte[] bytearr = new byte[stringbuf.remaining()];
stringbuf.get(bytearr);
- this.location = new String(bytearr, Charset.forName("UTF-8")).trim();
+ this.location = new String(bytearr, Charset.forName("UTF-8")).trim();
this.temperature = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(2));
- this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
+ this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
}
@Override
- public String toString() {
- return String.format("Serial number : " + serial_no + "%n" +
- "Location : " + location + "%n" +
- "Temperature (F) : " + temperature + "%n" +
- "Pressure (inHg) : " + pressure + "%n");
+ public String toString()
+ {
+ return String.format("Serial number : " + serial_no + "%n"
+ + "Location : " + location + "%n"
+ + "Temperature (F) : " + temperature + "%n"
+ + "Pressure (inHg) : " + pressure + "%n");
}
}
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long strtype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
Sensor[] object_data = new Sensor[DIM0];
- byte[] dset_data = null;
+ byte[] dset_data = null;
// Initialize data.
object_data[0] = new Sensor(1153, new String("Exterior (static)"), 53.23, 24.57);
@@ -144,7 +150,7 @@ public class H5Ex_T_CompoundAttribute {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -168,8 +174,8 @@ public class H5Ex_T_CompoundAttribute {
long type_id = Sensor_Datatype.memberMemTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
- H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
- type_id);
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
}
}
}
@@ -188,8 +194,8 @@ public class H5Ex_T_CompoundAttribute {
long type_id = Sensor_Datatype.memberFileTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
- H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
- type_id);
+ H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
}
}
}
@@ -202,7 +208,8 @@ public class H5Ex_T_CompoundAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -224,17 +231,17 @@ public class H5Ex_T_CompoundAttribute {
try {
if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
// Write the compound data.
- dset_data = new byte[(int) dims[0] * (int)Sensor_Datatype.getDataSize()];
+ dset_data = new byte[(int)dims[0] * (int)Sensor_Datatype.getDataSize()];
ByteBuffer outBuf = ByteBuffer.wrap(dset_data);
outBuf.order(ByteOrder.nativeOrder());
- for (int indx = 0; indx < (int) dims[0]; indx++) {
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
object_data[indx].writeBuffer(outBuf, indx * (int)Sensor_Datatype.getDataSize());
}
try {
@@ -305,17 +312,17 @@ public class H5Ex_T_CompoundAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long strtype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ long[] dims = {DIM0};
Sensor[] object_data2;
byte[] dset_data;
@@ -339,7 +346,7 @@ public class H5Ex_T_CompoundAttribute {
try {
if (dataset_id >= 0)
attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -382,8 +389,8 @@ public class H5Ex_T_CompoundAttribute {
long type_id = Sensor_Datatype.memberMemTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
- H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
- type_id);
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
}
}
}
@@ -392,9 +399,9 @@ public class H5Ex_T_CompoundAttribute {
}
// allocate memory for read buffer.
- dset_data = new byte[(int) dims[0] * (int)Sensor_Datatype.getDataSize()];
+ dset_data = new byte[(int)dims[0] * (int)Sensor_Datatype.getDataSize()];
- object_data2 = new Sensor[(int) dims[0]];
+ object_data2 = new Sensor[(int)dims[0]];
// Read data.
try {
@@ -403,7 +410,7 @@ public class H5Ex_T_CompoundAttribute {
ByteBuffer inBuf = ByteBuffer.wrap(dset_data);
inBuf.order(ByteOrder.nativeOrder());
- for (int indx = 0; indx < (int) dims[0]; indx++) {
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
object_data2[indx] = new Sensor(inBuf, indx * (int)Sensor_Datatype.getDataSize());
}
}
@@ -468,10 +475,10 @@ public class H5Ex_T_CompoundAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_CompoundAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -479,5 +486,4 @@ public class H5Ex_T_CompoundAttribute {
// data using malloc().
H5Ex_T_CompoundAttribute.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_Float.java b/java/examples/datatypes/H5Ex_T_Float.java
index e8da7f6..9ca099e 100644
--- a/java/examples/datatypes/H5Ex_T_Float.java
+++ b/java/examples/datatypes/H5Ex_T_Float.java
@@ -28,17 +28,18 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_Float {
- private static String FILENAME = "H5Ex_T_Float.h5";
+ private static String FILENAME = "H5Ex_T_Float.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM0 = 4;
- private static final int DIM1 = 7;
- private static final int RANK = 2;
-
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
double[][] dset_data = new double[DIM0][DIM1];
// Initialize data.
@@ -50,7 +51,7 @@ public class H5Ex_T_Float {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -73,7 +74,8 @@ public class H5Ex_T_Float {
try {
if ((file_id >= 0) && (dataspace_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_IEEE_F64LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -82,8 +84,8 @@ public class H5Ex_T_Float {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -115,14 +117,14 @@ public class H5Ex_T_Float {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
double[][] dset_data;
// Open an existing file.
@@ -161,13 +163,13 @@ public class H5Ex_T_Float {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset.
- dset_data = new double[(int) dims[0]][(int) (dims[1])];
+ dset_data = new double[(int)dims[0]][(int)(dims[1])];
// Read data.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -211,10 +213,10 @@ public class H5Ex_T_Float {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_Float.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -222,5 +224,4 @@ public class H5Ex_T_Float {
// data using malloc().
H5Ex_T_Float.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_FloatAttribute.java b/java/examples/datatypes/H5Ex_T_FloatAttribute.java
index eb8e1f8..426c4dd 100644
--- a/java/examples/datatypes/H5Ex_T_FloatAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_FloatAttribute.java
@@ -28,19 +28,20 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_FloatAttribute {
- private static String FILENAME = "H5Ex_T_FloatAttribute.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_FloatAttribute.h5";
+ private static String DATASETNAME = "DS1";
private static String ATTRIBUTENAME = "A1";
- private static final int DIM0 = 4;
- private static final int DIM1 = 7;
- private static final int RANK = 2;
-
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
double[][] dset_data = new double[DIM0][DIM1];
// Initialize data.
@@ -52,7 +53,7 @@ public class H5Ex_T_FloatAttribute {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -63,7 +64,8 @@ public class H5Ex_T_FloatAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -84,8 +86,9 @@ public class H5Ex_T_FloatAttribute {
// Create the attribute and write the array data to it.
try {
if ((dataset_id >= 0) && (dataspace_id >= 0))
- attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_IEEE_F64LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ attribute_id =
+ H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_IEEE_F64LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -134,15 +137,15 @@ public class H5Ex_T_FloatAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long[] dims = {DIM0, DIM1};
double[][] dset_data;
// Open an existing file.
@@ -165,7 +168,7 @@ public class H5Ex_T_FloatAttribute {
try {
if (dataset_id >= 0)
attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -190,7 +193,7 @@ public class H5Ex_T_FloatAttribute {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset.
- dset_data = new double[(int) dims[0]][(int) (dims[1])];
+ dset_data = new double[(int)dims[0]][(int)(dims[1])];
// Read data.
try {
@@ -247,10 +250,10 @@ public class H5Ex_T_FloatAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_FloatAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -258,5 +261,4 @@ public class H5Ex_T_FloatAttribute {
// data using malloc().
H5Ex_T_FloatAttribute.ReadDataset();
}
-
}
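
For the attribute examples the hunks again change only line breaks. A compact, hypothetical sketch of attaching a 2-D double attribute to a scalar placeholder dataset follows; H5Screate_simple and H5Awrite do not appear in the hunks above and are assumed from the same hdf.hdf5lib bindings, and "attr_sketch.h5" is an illustrative file name.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class FloatAttributeSketch {
    public static void main(String[] args) throws Exception {
        final int DIM0 = 4, DIM1 = 7; // same illustrative sizes as the example above
        double[][] attr_data = new double[DIM0][DIM1];
        for (int indx = 0; indx < DIM0; indx++)
            for (int jndx = 0; jndx < DIM1; jndx++)
                attr_data[indx][jndx] = indx * jndx;

        long file_id = H5.H5Fcreate("attr_sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);

        // The scalar dataset exists only to own the attribute, as in the example above.
        long scalar_id  = H5.H5Screate(HDF5Constants.H5S_SCALAR);
        long dataset_id = H5.H5Dcreate(file_id, "DS1", HDF5Constants.H5T_STD_I32LE, scalar_id,
                                       HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
                                       HDF5Constants.H5P_DEFAULT);

        // The attribute carries the actual 2-D double payload.
        long space_id = H5.H5Screate_simple(2, new long[] {DIM0, DIM1}, null);
        long attr_id  = H5.H5Acreate(dataset_id, "A1", HDF5Constants.H5T_IEEE_F64LE, space_id,
                                     HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        H5.H5Awrite(attr_id, HDF5Constants.H5T_NATIVE_DOUBLE, attr_data);

        H5.H5Aclose(attr_id);
        H5.H5Sclose(space_id);
        H5.H5Sclose(scalar_id);
        H5.H5Dclose(dataset_id);
        H5.H5Fclose(file_id);
    }
}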
diff --git a/java/examples/datatypes/H5Ex_T_Integer.java b/java/examples/datatypes/H5Ex_T_Integer.java
index bb8e0cb..919ea7e 100644
--- a/java/examples/datatypes/H5Ex_T_Integer.java
+++ b/java/examples/datatypes/H5Ex_T_Integer.java
@@ -26,17 +26,18 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_Integer {
- private static String FILENAME = "H5Ex_T_Integer.h5";
+ private static String FILENAME = "H5Ex_T_Integer.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM0 = 4;
- private static final int DIM1 = 7;
- private static final int RANK = 2;
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
int[][] dset_data = new int[DIM0][DIM1];
// Initialize data.
@@ -48,7 +49,7 @@ public class H5Ex_T_Integer {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -70,7 +71,8 @@ public class H5Ex_T_Integer {
try {
if ((file_id >= 0) && (dataspace_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I64BE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -79,8 +81,8 @@ public class H5Ex_T_Integer {
// Write the data to the dataset.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -112,14 +114,14 @@ public class H5Ex_T_Integer {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
int[][] dset_data;
// Open an existing file.
@@ -158,13 +160,13 @@ public class H5Ex_T_Integer {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset.
- dset_data = new int[(int) dims[0]][(int) (dims[1])];
+ dset_data = new int[(int)dims[0]][(int)(dims[1])];
// Read data.
try {
if (dataset_id >= 0)
- H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -208,10 +210,10 @@ public class H5Ex_T_Integer {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_Integer.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -219,5 +221,4 @@ public class H5Ex_T_Integer {
// data using malloc().
H5Ex_T_Integer.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_IntegerAttribute.java b/java/examples/datatypes/H5Ex_T_IntegerAttribute.java
index b0df5e4..4ec98c4 100644
--- a/java/examples/datatypes/H5Ex_T_IntegerAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_IntegerAttribute.java
@@ -26,19 +26,20 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_IntegerAttribute {
- private static String FILENAME = "H5Ex_T_IntegerAttribute.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_IntegerAttribute.h5";
+ private static String DATASETNAME = "DS1";
private static String ATTRIBUTENAME = "A1";
- private static final int DIM0 = 4;
- private static final int DIM1 = 7;
- private static final int RANK = 2;
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long[] dims = {DIM0, DIM1};
int[][] dset_data = new int[DIM0][DIM1];
// Initialize data.
@@ -50,7 +51,7 @@ public class H5Ex_T_IntegerAttribute {
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -61,7 +62,8 @@ public class H5Ex_T_IntegerAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -82,8 +84,9 @@ public class H5Ex_T_IntegerAttribute {
// Create the attribute and write the array data to it.
try {
if ((dataset_id >= 0) && (dataspace_id >= 0))
- attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_I64BE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ attribute_id =
+ H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_I64BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -132,15 +135,15 @@ public class H5Ex_T_IntegerAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0, DIM1 };
+ long[] dims = {DIM0, DIM1};
int[][] dset_data;
// Open an existing file.
@@ -163,7 +166,7 @@ public class H5Ex_T_IntegerAttribute {
try {
if (dataset_id >= 0)
attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -188,7 +191,7 @@ public class H5Ex_T_IntegerAttribute {
// Allocate array of pointers to two-dimensional arrays (the
// elements of the dataset.
- dset_data = new int[(int) dims[0]][(int) (dims[1])];
+ dset_data = new int[(int)dims[0]][(int)(dims[1])];
// Read data.
try {
@@ -245,10 +248,10 @@ public class H5Ex_T_IntegerAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_IntegerAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -256,5 +259,4 @@ public class H5Ex_T_IntegerAttribute {
// data using malloc().
H5Ex_T_IntegerAttribute.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_ObjectReference.java b/java/examples/datatypes/H5Ex_T_ObjectReference.java
index b0f98de..9220d8f 100644
--- a/java/examples/datatypes/H5Ex_T_ObjectReference.java
+++ b/java/examples/datatypes/H5Ex_T_ObjectReference.java
@@ -28,54 +28,50 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_ObjectReference {
- private static String FILENAME = "H5Ex_T_ObjectReference.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_ObjectReference.h5";
+ private static String DATASETNAME = "DS1";
private static String DATASETNAME2 = "DS2";
- private static String GROUPNAME = "G1";
- private static final int DIM0 = 2;
- private static final int RANK = 1;
+ private static String GROUPNAME = "G1";
+ private static final int DIM0 = 2;
+ private static final int RANK = 1;
// Values for the status of space allocation
enum H5G_obj {
- H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */
- H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */
- H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */
+ H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */
+ H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */
+ H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */
H5G_TYPE(HDF5Constants.H5O_TYPE_NAMED_DATATYPE); /* Object is a named data type */
private static final Map<Integer, H5G_obj> lookup = new HashMap<Integer, H5G_obj>();
- static {
+ static
+ {
for (H5G_obj s : EnumSet.allOf(H5G_obj.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5G_obj(int layout_type) {
- this.code = layout_type;
- }
+ H5G_obj(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5G_obj get(int code) {
- return lookup.get(code);
- }
+ public static H5G_obj get(int code) { return lookup.get(code); }
}
- private static void writeObjRef() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long filespace_id = HDF5Constants.H5I_INVALID_HID;
- long group_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ private static void writeObjRef()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -86,7 +82,8 @@ public class H5Ex_T_ObjectReference {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if ((file_id >= 0) && (dataspace_id >= 0)) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
if (dataset_id >= 0)
H5.H5Dclose(dataset_id);
dataset_id = HDF5Constants.H5I_INVALID_HID;
@@ -101,8 +98,8 @@ public class H5Ex_T_ObjectReference {
// Create a group in the file.
try {
if (file_id >= 0)
- group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
if (group_id >= 0)
H5.H5Gclose(group_id);
group_id = HDF5Constants.H5I_INVALID_HID;
@@ -119,7 +116,7 @@ public class H5Ex_T_ObjectReference {
catch (Throwable err) {
err.printStackTrace();
}
-
+
try {
dset_data[1] = H5.H5Rcreate_object(file_id, DATASETNAME2, HDF5Constants.H5P_DEFAULT);
}
@@ -127,7 +124,7 @@ public class H5Ex_T_ObjectReference {
err.printStackTrace();
}
}
-
+
// Create dataspace. Setting maximum size to NULL sets the maximum
// size to be the current size.
try {
@@ -136,22 +133,23 @@ public class H5Ex_T_ObjectReference {
catch (Exception e) {
e.printStackTrace();
}
-
+
// Create the dataset.
try {
if ((file_id >= 0) && (filespace_id >= 0))
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_REF, filespace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
-
+
// Write the object references to it.
try {
if (dataset_id >= 0)
- H5.H5Dwrite(dataset_id, HDF5Constants.H5T_STD_REF, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_STD_REF, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -161,8 +159,16 @@ public class H5Ex_T_ObjectReference {
ex.printStackTrace();
}
finally {
- try {H5.H5Rdestroy(dset_data[1]);} catch (Exception ex) {}
- try {H5.H5Rdestroy(dset_data[0]);} catch (Exception ex) {}
+ try {
+ H5.H5Rdestroy(dset_data[1]);
+ }
+ catch (Exception ex) {
+ }
+ try {
+ H5.H5Rdestroy(dset_data[0]);
+ }
+ catch (Exception ex) {
+ }
}
// End access to the dataset and release resources used by it.
@@ -193,13 +199,14 @@ public class H5Ex_T_ObjectReference {
}
}
- private static void readObjRef() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- int object_type = -1;
- long object_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ private static void readObjRef()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ int object_type = -1;
+ long object_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
// Open an existing file.
@@ -209,23 +216,24 @@ public class H5Ex_T_ObjectReference {
// Open an existing dataset.
try {
dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-
+
try {
// Get dataspace and allocate memory for read buffer.
dataspace_id = H5.H5Dget_space(dataset_id);
H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-
+
// Read data.
- H5.H5Dread(dataset_id, HDF5Constants.H5T_STD_REF, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
-
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_STD_REF, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+
// Output the data to the screen.
for (int indx = 0; indx < dims[0]; indx++) {
System.out.println(DATASETNAME + "[" + indx + "]:");
System.out.print(" ->");
// Open the referenced object, get its name and type.
try {
- object_id = H5.H5Ropen_object(dset_data[indx], HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ object_id = H5.H5Ropen_object(dset_data[indx], HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
try {
object_type = H5.H5Rget_obj_type3(dset_data[indx], HDF5Constants.H5R_OBJECT);
String obj_name = null;
@@ -255,14 +263,22 @@ public class H5Ex_T_ObjectReference {
e.printStackTrace();
}
finally {
- try {H5.H5Oclose(object_id);} catch (Exception e) {}
+ try {
+ H5.H5Oclose(object_id);
+ }
+ catch (Exception e) {
+ }
}
}
catch (Exception e4) {
e4.printStackTrace();
}
finally {
- try {H5.H5Rdestroy(dset_data[indx]);} catch (Exception e4) {}
+ try {
+ H5.H5Rdestroy(dset_data[indx]);
+ }
+ catch (Exception e4) {
+ }
}
} // end for
}
@@ -270,25 +286,38 @@ public class H5Ex_T_ObjectReference {
e3.printStackTrace();
}
finally {
- try {H5.H5Sclose(dataspace_id);} catch (Exception e3) {}
+ try {
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e3) {
+ }
}
}
catch (Exception e2) {
e2.printStackTrace();
}
finally {
- try {H5.H5Dclose(dataset_id);} catch (Exception e2) {}
+ try {
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e2) {
+ }
}
}
catch (Exception e1) {
e1.printStackTrace();
}
finally {
- try {H5.H5Fclose(file_id);} catch (Exception e1) {}
+ try {
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e1) {
+ }
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
// Check if gzip compression is available and can be used for both
// compression and decompression. Normally we do not perform error
// checking in these examples for the sake of clarity, but in this
@@ -297,5 +326,4 @@ public class H5Ex_T_ObjectReference {
H5Ex_T_ObjectReference.writeObjRef();
H5Ex_T_ObjectReference.readObjRef();
}
-
}
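
The object-reference hunks mainly expand the one-line try/catch cleanup of references into clang-format's multi-line form. A hypothetical sketch of the underlying lifecycle, reusing the H5Ex_T_ObjectReference.h5 file and the DS2 dataset written by the example above (the standalone class name is illustrative):

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class ObjectReferenceCleanupSketch {
    public static void main(String[] args) throws Exception {
        long file_id = H5.H5Fopen("H5Ex_T_ObjectReference.h5", HDF5Constants.H5F_ACC_RDONLY,
                                  HDF5Constants.H5P_DEFAULT);
        // Every reference created with H5Rcreate_object must be destroyed explicitly.
        byte[] ref = H5.H5Rcreate_object(file_id, "DS2", HDF5Constants.H5P_DEFAULT);
        try {
            long object_id = H5.H5Ropen_object(ref, HDF5Constants.H5P_DEFAULT,
                                               HDF5Constants.H5P_DEFAULT);
            try {
                int object_type = H5.H5Rget_obj_type3(ref, HDF5Constants.H5R_OBJECT);
                System.out.println("DS2 reference resolves to object type " + object_type);
            }
            finally {
                H5.H5Oclose(object_id);
            }
        }
        finally {
            H5.H5Rdestroy(ref);
            H5.H5Fclose(file_id);
        }
    }
}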
diff --git a/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java b/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java
index f61ae0d..be84e51 100644
--- a/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java
@@ -28,55 +28,51 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_ObjectReferenceAttribute {
- private static String FILENAME = "H5Ex_T_ObjectReferenceAttribute.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_ObjectReferenceAttribute.h5";
+ private static String DATASETNAME = "DS1";
private static String ATTRIBUTENAME = "A1";
- private static String DATASETNAME2 = "DS2";
- private static String GROUPNAME = "G1";
- private static final int DIM0 = 2;
- private static final int RANK = 1;
+ private static String DATASETNAME2 = "DS2";
+ private static String GROUPNAME = "G1";
+ private static final int DIM0 = 2;
+ private static final int RANK = 1;
// Values for the status of space allocation
enum H5G_obj {
- H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */
- H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */
- H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */
+ H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */
+ H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */
+ H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */
H5G_TYPE(HDF5Constants.H5O_TYPE_NAMED_DATATYPE); /* Object is a named data type */
private static final Map<Integer, H5G_obj> lookup = new HashMap<Integer, H5G_obj>();
- static {
+ static
+ {
for (H5G_obj s : EnumSet.allOf(H5G_obj.class))
lookup.put(s.getCode(), s);
}
private int code;
- H5G_obj(int layout_type) {
- this.code = layout_type;
- }
+ H5G_obj(int layout_type) { this.code = layout_type; }
- public int getCode() {
- return this.code;
- }
+ public int getCode() { return this.code; }
- public static H5G_obj get(int code) {
- return lookup.get(code);
- }
+ public static H5G_obj get(int code) { return lookup.get(code); }
}
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long group_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -87,7 +83,8 @@ public class H5Ex_T_ObjectReferenceAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if ((file_id >= 0) && (dataspace_id >= 0)) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
if (dataset_id >= 0)
H5.H5Dclose(dataset_id);
dataset_id = HDF5Constants.H5I_INVALID_HID;
@@ -102,8 +99,8 @@ public class H5Ex_T_ObjectReferenceAttribute {
// Create a group in the file.
try {
if (file_id >= 0)
- group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
if (group_id >= 0)
H5.H5Gclose(group_id);
group_id = HDF5Constants.H5I_INVALID_HID;
@@ -120,7 +117,7 @@ public class H5Ex_T_ObjectReferenceAttribute {
catch (Throwable err) {
err.printStackTrace();
}
-
+
try {
dset_data[1] = H5.H5Rcreate_object(file_id, DATASETNAME2, HDF5Constants.H5P_DEFAULT);
}
@@ -135,7 +132,8 @@ public class H5Ex_T_ObjectReferenceAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -143,7 +141,7 @@ public class H5Ex_T_ObjectReferenceAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
+
// Create dataspace. Setting maximum size to NULL sets the maximum
// size to be the current size.
try {
@@ -152,17 +150,18 @@ public class H5Ex_T_ObjectReferenceAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
+
// Create the attribute and write the array data to it.
try {
if ((dataset_id >= 0) && (dataspace_id >= 0))
- attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_REF, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ attribute_id =
+ H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_REF, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
-
+
// Write the dataset.
try {
if (attribute_id >= 0)
@@ -176,8 +175,16 @@ public class H5Ex_T_ObjectReferenceAttribute {
ex.printStackTrace();
}
finally {
- try {H5.H5Rdestroy(dset_data[1]);} catch (Exception ex) {}
- try {H5.H5Rdestroy(dset_data[0]);} catch (Exception ex) {}
+ try {
+ H5.H5Rdestroy(dset_data[1]);
+ }
+ catch (Exception ex) {
+ }
+ try {
+ H5.H5Rdestroy(dset_data[0]);
+ }
+ catch (Exception ex) {
+ }
}
// End access to the dataset and release resources used by it.
@@ -215,14 +222,15 @@ public class H5Ex_T_ObjectReferenceAttribute {
}
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long attribute_id = HDF5Constants.H5I_INVALID_HID;
- int object_type = -1;
- long object_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ int object_type = -1;
+ long object_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
// Open an existing file.
@@ -232,28 +240,30 @@ public class H5Ex_T_ObjectReferenceAttribute {
// Open an existing dataset.
try {
dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
-
+
try {
- attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
-
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
// Get dataspace and allocate memory for read buffer.
try {
dataspace_id = H5.H5Aget_space(attribute_id);
H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
-
+
// Read data.
H5.H5Aread(attribute_id, HDF5Constants.H5T_STD_REF, dset_data);
-
+
// Output the data to the screen.
for (int indx = 0; indx < dims[0]; indx++) {
System.out.println(ATTRIBUTENAME + "[" + indx + "]:");
System.out.print(" ->");
// Open the referenced object, get its name and type.
try {
- object_id = H5.H5Ropen_object(dset_data[indx], HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ object_id = H5.H5Ropen_object(dset_data[indx], HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
try {
- object_type = H5.H5Rget_obj_type3(dset_data[indx], HDF5Constants.H5R_OBJECT);
+ object_type =
+ H5.H5Rget_obj_type3(dset_data[indx], HDF5Constants.H5R_OBJECT);
String obj_name = null;
if (object_type >= 0) {
// Get the name.
@@ -281,14 +291,22 @@ public class H5Ex_T_ObjectReferenceAttribute {
e.printStackTrace();
}
finally {
- try {H5.H5Oclose(object_id);} catch (Exception e) {}
+ try {
+ H5.H5Oclose(object_id);
+ }
+ catch (Exception e) {
+ }
}
}
catch (Exception e5) {
e5.printStackTrace();
}
finally {
- try {H5.H5Rdestroy(dset_data[indx]);} catch (Exception e5) {}
+ try {
+ H5.H5Rdestroy(dset_data[indx]);
+ }
+ catch (Exception e5) {
+ }
}
} // end for
}
@@ -296,32 +314,49 @@ public class H5Ex_T_ObjectReferenceAttribute {
e4.printStackTrace();
}
finally {
- try {H5.H5Sclose(dataspace_id);} catch (Exception e3) {}
+ try {
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e3) {
+ }
}
}
catch (Exception e3) {
e3.printStackTrace();
}
finally {
- try {H5.H5Aclose(attribute_id);} catch (Exception e4) {}
+ try {
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e4) {
+ }
}
}
catch (Exception e2) {
e2.printStackTrace();
}
finally {
- try {H5.H5Dclose(dataset_id);} catch (Exception e2) {}
+ try {
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e2) {
+ }
}
}
catch (Exception e1) {
e1.printStackTrace();
}
finally {
- try {H5.H5Fclose(file_id);} catch (Exception e1) {}
+ try {
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e1) {
+ }
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_ObjectReferenceAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -329,5 +364,4 @@ public class H5Ex_T_ObjectReferenceAttribute {
// data using malloc().
H5Ex_T_ObjectReferenceAttribute.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_Opaque.java b/java/examples/datatypes/H5Ex_T_Opaque.java
index e851fb2..419a5c8 100644
--- a/java/examples/datatypes/H5Ex_T_Opaque.java
+++ b/java/examples/datatypes/H5Ex_T_Opaque.java
@@ -24,32 +24,33 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_Opaque {
- private static String FILENAME = "H5Ex_T_Opaque.h5";
+ private static String FILENAME = "H5Ex_T_Opaque.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM0 = 4;
- private static final int LEN = 7;
- private static final int RANK = 1;
+ private static final int DIM0 = 4;
+ private static final int LEN = 7;
+ private static final int RANK = 1;
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long datatype_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- byte[] dset_data = new byte[DIM0 * LEN];
- byte[] str_data = { 'O', 'P', 'A', 'Q', 'U', 'E' };
+ long datatype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[] dset_data = new byte[DIM0 * LEN];
+ byte[] str_data = {'O', 'P', 'A', 'Q', 'U', 'E'};
// Initialize data.
for (int indx = 0; indx < DIM0; indx++) {
for (int jndx = 0; jndx < LEN - 1; jndx++)
dset_data[jndx + indx * LEN] = str_data[jndx];
- dset_data[LEN - 1 + indx * LEN] = (byte) (indx + '0');
+ dset_data[LEN - 1 + indx * LEN] = (byte)(indx + '0');
}
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -82,8 +83,9 @@ public class H5Ex_T_Opaque {
// automatically converts between different integer types.
try {
if ((file_id >= 0) && (datatype_id >= 0) && (dataspace_id >= 0))
- dataset_id = H5.H5Dcreate(file_id, DATASETNAME, datatype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, datatype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -93,7 +95,7 @@ public class H5Ex_T_Opaque {
try {
if ((dataset_id >= 0) && (datatype_id >= 0))
H5.H5Dwrite(dataset_id, datatype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -133,16 +135,16 @@ public class H5Ex_T_Opaque {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long datatype_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long datatype_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long type_len = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long type_len = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
byte[] dset_data;
String tag_name = null;
@@ -194,13 +196,13 @@ public class H5Ex_T_Opaque {
}
// Allocate buffer.
- dset_data = new byte[(int) (dims[0] * type_len)];
+ dset_data = new byte[(int)(dims[0] * type_len)];
// Read data.
try {
if ((dataset_id >= 0) && (datatype_id >= 0))
H5.H5Dread(dataset_id, datatype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -211,7 +213,7 @@ public class H5Ex_T_Opaque {
for (int indx = 0; indx < dims[0]; indx++) {
System.out.print(DATASETNAME + "[" + indx + "]: ");
for (int jndx = 0; jndx < type_len; jndx++) {
- char temp = (char) dset_data[jndx + indx * (int)type_len];
+ char temp = (char)dset_data[jndx + indx * (int)type_len];
System.out.print(temp);
}
System.out.println("");
@@ -252,10 +254,10 @@ public class H5Ex_T_Opaque {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_Opaque.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -263,5 +265,4 @@ public class H5Ex_T_Opaque {
// data using malloc().
H5Ex_T_Opaque.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java b/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java
index 3e16ab4..b8a15a6 100644
--- a/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java
@@ -24,34 +24,35 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_OpaqueAttribute {
- private static String FILENAME = "H5Ex_T_OpaqueAttribute.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_OpaqueAttribute.h5";
+ private static String DATASETNAME = "DS1";
private static String ATTRIBUTENAME = "A1";
- private static final int DIM0 = 4;
- private static final int LEN = 7;
- private static final int RANK = 1;
+ private static final int DIM0 = 4;
+ private static final int LEN = 7;
+ private static final int RANK = 1;
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long datatype_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long datatype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- byte[] dset_data = new byte[DIM0 * LEN];
- byte[] str_data = { 'O', 'P', 'A', 'Q', 'U', 'E' };
+ long[] dims = {DIM0};
+ byte[] dset_data = new byte[DIM0 * LEN];
+ byte[] str_data = {'O', 'P', 'A', 'Q', 'U', 'E'};
// Initialize data.
for (int indx = 0; indx < DIM0; indx++) {
for (int jndx = 0; jndx < LEN - 1; jndx++)
dset_data[jndx + indx * LEN] = str_data[jndx];
- dset_data[LEN - 1 + indx * LEN] = (byte) (indx + '0');
+ dset_data[LEN - 1 + indx * LEN] = (byte)(indx + '0');
}
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -62,7 +63,8 @@ public class H5Ex_T_OpaqueAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -96,7 +98,7 @@ public class H5Ex_T_OpaqueAttribute {
try {
if ((dataset_id >= 0) && (datatype_id >= 0) && (dataspace_id >= 0))
attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, datatype_id, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -153,17 +155,17 @@ public class H5Ex_T_OpaqueAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long datatype_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long datatype_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long type_len = -1;
- long[] dims = { DIM0 };
+ long type_len = -1;
+ long[] dims = {DIM0};
byte[] dset_data;
String tag_name = null;
@@ -187,7 +189,7 @@ public class H5Ex_T_OpaqueAttribute {
try {
if (dataset_id >= 0)
attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -224,7 +226,7 @@ public class H5Ex_T_OpaqueAttribute {
}
// Allocate buffer.
- dset_data = new byte[(int) (dims[0] * type_len)];
+ dset_data = new byte[(int)(dims[0] * type_len)];
// Read data.
try {
@@ -240,7 +242,7 @@ public class H5Ex_T_OpaqueAttribute {
for (int indx = 0; indx < dims[0]; indx++) {
System.out.print(ATTRIBUTENAME + "[" + indx + "]: ");
for (int jndx = 0; jndx < type_len; jndx++) {
- char temp = (char) dset_data[jndx + indx * (int)type_len];
+ char temp = (char)dset_data[jndx + indx * (int)type_len];
System.out.print(temp);
}
System.out.println("");
@@ -289,10 +291,10 @@ public class H5Ex_T_OpaqueAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_OpaqueAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -300,5 +302,4 @@ public class H5Ex_T_OpaqueAttribute {
// data using malloc().
H5Ex_T_OpaqueAttribute.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_String.java b/java/examples/datatypes/H5Ex_T_String.java
index e497bd8..a69a70b 100644
--- a/java/examples/datatypes/H5Ex_T_String.java
+++ b/java/examples/datatypes/H5Ex_T_String.java
@@ -24,27 +24,28 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_String {
- private static String FILENAME = "H5Ex_T_String.h5";
+ private static String FILENAME = "H5Ex_T_String.h5";
private static String DATASETNAME = "DS1";
- private static final int DIM0 = 4;
- private static final int SDIM = 8;
- private static final int RANK = 1;
-
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- byte[][] dset_data = new byte[DIM0][SDIM];
- StringBuffer[] str_data = { new StringBuffer("Parting"), new StringBuffer("is such"),
- new StringBuffer("sweet"), new StringBuffer("sorrow.") };
+ private static final int DIM0 = 4;
+ private static final int SDIM = 8;
+ private static final int RANK = 1;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data = new byte[DIM0][SDIM];
+ StringBuffer[] str_data = {new StringBuffer("Parting"), new StringBuffer("is such"),
+ new StringBuffer("sweet"), new StringBuffer("sorrow.")};
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -82,8 +83,9 @@ public class H5Ex_T_String {
// Create the dataset and write the string data to it.
try {
if ((file_id >= 0) && (filetype_id >= 0) && (dataspace_id >= 0))
- dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -94,14 +96,14 @@ public class H5Ex_T_String {
for (int indx = 0; indx < DIM0; indx++) {
for (int jndx = 0; jndx < SDIM; jndx++) {
if (jndx < str_data[indx].length())
- dset_data[indx][jndx] = (byte) str_data[indx].charAt(jndx);
+ dset_data[indx][jndx] = (byte)str_data[indx].charAt(jndx);
else
dset_data[indx][jndx] = 0;
}
}
if ((dataset_id >= 0) && (memtype_id >= 0))
H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -151,17 +153,17 @@ public class H5Ex_T_String {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long sdim = 0;
- long[] dims = { DIM0 };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long sdim = 0;
+ long[] dims = {DIM0};
byte[][] dset_data;
StringBuffer[] str_data;
@@ -213,8 +215,8 @@ public class H5Ex_T_String {
}
// Allocate space for data.
- dset_data = new byte[(int) dims[0]][(int)sdim];
- str_data = new StringBuffer[(int) dims[0]];
+ dset_data = new byte[(int)dims[0]][(int)sdim];
+ str_data = new StringBuffer[(int)dims[0]];
// Create the memory datatype.
try {
@@ -230,9 +232,9 @@ public class H5Ex_T_String {
try {
if ((dataset_id >= 0) && (memtype_id >= 0))
H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, dset_data);
+ HDF5Constants.H5P_DEFAULT, dset_data);
byte[] tempbuf = new byte[(int)sdim];
- for (int indx = 0; indx < (int) dims[0]; indx++) {
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
for (int jndx = 0; jndx < sdim; jndx++) {
tempbuf[jndx] = dset_data[indx][jndx];
}
@@ -293,10 +295,10 @@ public class H5Ex_T_String {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_String.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -304,5 +306,4 @@ public class H5Ex_T_String {
// data using malloc().
H5Ex_T_String.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_StringAttribute.java b/java/examples/datatypes/H5Ex_T_StringAttribute.java
index 700f6a9..46c1038 100644
--- a/java/examples/datatypes/H5Ex_T_StringAttribute.java
+++ b/java/examples/datatypes/H5Ex_T_StringAttribute.java
@@ -24,29 +24,30 @@ import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class H5Ex_T_StringAttribute {
- private static String FILENAME = "H5Ex_T_StringAttribute.h5";
- private static String DATASETNAME = "DS1";
+ private static String FILENAME = "H5Ex_T_StringAttribute.h5";
+ private static String DATASETNAME = "DS1";
private static String ATTRIBUTENAME = "A1";
- private static final int DIM0 = 4;
- private static final int SDIM = 8;
- private static final int RANK = 1;
-
- private static void CreateDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long[] dims = { DIM0 };
- byte[][] dset_data = new byte[DIM0][SDIM];
- StringBuffer[] str_data = { new StringBuffer("Parting"), new StringBuffer("is such"),
- new StringBuffer("sweet"), new StringBuffer("sorrow.") };
+ private static final int DIM0 = 4;
+ private static final int SDIM = 8;
+ private static final int RANK = 1;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data = new byte[DIM0][SDIM];
+ StringBuffer[] str_data = {new StringBuffer("Parting"), new StringBuffer("is such"),
+ new StringBuffer("sweet"), new StringBuffer("sorrow.")};
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -77,7 +78,8 @@ public class H5Ex_T_StringAttribute {
dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
if (dataspace_id >= 0) {
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
H5.H5Sclose(dataspace_id);
dataspace_id = HDF5Constants.H5I_INVALID_HID;
}
@@ -99,7 +101,7 @@ public class H5Ex_T_StringAttribute {
try {
if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -110,7 +112,7 @@ public class H5Ex_T_StringAttribute {
for (int indx = 0; indx < DIM0; indx++) {
for (int jndx = 0; jndx < SDIM; jndx++) {
if (jndx < str_data[indx].length())
- dset_data[indx][jndx] = (byte) str_data[indx].charAt(jndx);
+ dset_data[indx][jndx] = (byte)str_data[indx].charAt(jndx);
else
dset_data[indx][jndx] = 0;
}
@@ -174,18 +176,18 @@ public class H5Ex_T_StringAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- private static void ReadDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long filetype_id = HDF5Constants.H5I_INVALID_HID;
- long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
long attribute_id = HDF5Constants.H5I_INVALID_HID;
- long sdim = 0;
- long[] dims = { DIM0 };
+ long sdim = 0;
+ long[] dims = {DIM0};
byte[][] dset_data;
StringBuffer[] str_data;
@@ -209,7 +211,7 @@ public class H5Ex_T_StringAttribute {
try {
if (dataset_id >= 0)
attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -246,8 +248,8 @@ public class H5Ex_T_StringAttribute {
}
// Allocate space for data.
- dset_data = new byte[(int) dims[0]][(int)sdim];
- str_data = new StringBuffer[(int) dims[0]];
+ dset_data = new byte[(int)dims[0]][(int)sdim];
+ str_data = new StringBuffer[(int)dims[0]];
// Create the memory datatype.
try {
@@ -264,7 +266,7 @@ public class H5Ex_T_StringAttribute {
if ((attribute_id >= 0) && (memtype_id >= 0))
H5.H5Aread(attribute_id, memtype_id, dset_data);
byte[] tempbuf = new byte[(int)sdim];
- for (int indx = 0; indx < (int) dims[0]; indx++) {
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
for (int jndx = 0; jndx < sdim; jndx++) {
tempbuf[jndx] = dset_data[indx][jndx];
}
@@ -333,10 +335,10 @@ public class H5Ex_T_StringAttribute {
catch (Exception e) {
e.printStackTrace();
}
-
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_StringAttribute.CreateDataset();
// Now we begin the read section of this example. Here we assume
// the dataset and array have the same name and rank, but can have
@@ -344,5 +346,4 @@ public class H5Ex_T_StringAttribute {
// data using malloc().
H5Ex_T_StringAttribute.ReadDataset();
}
-
}
diff --git a/java/examples/datatypes/H5Ex_T_VLString.java b/java/examples/datatypes/H5Ex_T_VLString.java
index 8a29e60..1d4c2c4 100644
--- a/java/examples/datatypes/H5Ex_T_VLString.java
+++ b/java/examples/datatypes/H5Ex_T_VLString.java
@@ -19,24 +19,24 @@ package examples.datatypes;
import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
-public class H5Ex_T_VLString
-{
- private static String FILENAME = "H5Ex_T_VLString.h5";
+public class H5Ex_T_VLString {
+ private static String FILENAME = "H5Ex_T_VLString.h5";
private static String DATASETNAME = "DS1";
- private static void createDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long type_id = HDF5Constants.H5I_INVALID_HID;
+ private static void createDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long type_id = HDF5Constants.H5I_INVALID_HID;
long dataspace_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- int rank = 1;
- String[] str_data = { "Parting", "is such", "sweet", "sorrow." };
- long[] dims = { str_data.length };
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ int rank = 1;
+ String[] str_data = {"Parting", "is such", "sweet", "sorrow."};
+ long[] dims = {str_data.length};
// Create a new file using default properties.
try {
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT);
+ HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
@@ -62,8 +62,9 @@ public class H5Ex_T_VLString
// Create the dataset and write the string data to it.
try {
if ((file_id >= 0) && (type_id >= 0) && (dataspace_id >= 0)) {
- dataset_id = H5.H5Dcreate(file_id, DATASETNAME, type_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
- HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, type_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
}
}
catch (Exception e) {
@@ -74,7 +75,7 @@ public class H5Ex_T_VLString
try {
if (dataset_id >= 0)
H5.H5Dwrite_VLStrings(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
- HDF5Constants.H5P_DEFAULT, str_data);
+ HDF5Constants.H5P_DEFAULT, str_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -91,11 +92,12 @@ public class H5Ex_T_VLString
}
}
- private static void readDataset() {
- long file_id = HDF5Constants.H5I_INVALID_HID;
- long type_id = HDF5Constants.H5I_INVALID_HID;
- long dataset_id = HDF5Constants.H5I_INVALID_HID;
- String[] str_data = { "", "", "", "" };
+ private static void readDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long type_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ String[] str_data = {"", "", "", ""};
try {
file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
@@ -106,9 +108,9 @@ public class H5Ex_T_VLString
try {
dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
- type_id = H5.H5Dget_type(dataset_id);
- H5.H5Dread_VLStrings(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT,
- str_data);
+ type_id = H5.H5Dget_type(dataset_id);
+ H5.H5Dread_VLStrings(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, str_data);
}
catch (Exception e) {
e.printStackTrace();
@@ -127,9 +129,9 @@ public class H5Ex_T_VLString
}
}
- public static void main(String[] args) {
+ public static void main(String[] args)
+ {
H5Ex_T_VLString.createDataset();
H5Ex_T_VLString.readDataset();
}
-
}
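
Finally, for the variable-length string example, a brief hypothetical re-read of the H5Ex_T_VLString.h5 file produced above, reusing the on-disk datatype via H5Dget_type as the example does; H5Tclose is assumed from the same hdf.hdf5lib bindings and the class name is illustrative.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class VLStringReadSketch {
    public static void main(String[] args) throws Exception {
        String[] str_data = {"", "", "", ""}; // DS1 holds four variable-length strings
        long file_id = H5.H5Fopen("H5Ex_T_VLString.h5", HDF5Constants.H5F_ACC_RDONLY,
                                  HDF5Constants.H5P_DEFAULT);
        long dataset_id = H5.H5Dopen(file_id, "DS1", HDF5Constants.H5P_DEFAULT);
        long type_id    = H5.H5Dget_type(dataset_id); // reuse the variable-length string type on disk
        try {
            H5.H5Dread_VLStrings(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
                                 HDF5Constants.H5P_DEFAULT, str_data);
            for (String s : str_data)
                System.out.println("DS1: " + s);
        }
        finally {
            H5.H5Tclose(type_id);
            H5.H5Dclose(dataset_id);
            H5.H5Fclose(file_id);
        }
    }
}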