Diffstat (limited to 'java')
-rw-r--r--  java/CMakeLists.txt | 77
-rw-r--r--  java/COPYING | 16
-rw-r--r--  java/Makefile.am | 38
-rw-r--r--  java/examples/CMakeLists.txt | 7
-rw-r--r--  java/examples/Makefile.am | 31
-rw-r--r--  java/examples/datasets/CMakeLists.txt | 138
-rw-r--r--  java/examples/datasets/H5Ex_D_Alloc.java | 301
-rw-r--r--  java/examples/datasets/H5Ex_D_Checksum.java | 347
-rw-r--r--  java/examples/datasets/H5Ex_D_Chunk.java | 366
-rw-r--r--  java/examples/datasets/H5Ex_D_Compact.java | 289
-rw-r--r--  java/examples/datasets/H5Ex_D_External.java | 238
-rw-r--r--  java/examples/datasets/H5Ex_D_FillValue.java | 246
-rw-r--r--  java/examples/datasets/H5Ex_D_Gzip.java | 336
-rw-r--r--  java/examples/datasets/H5Ex_D_Hyperslab.java | 269
-rw-r--r--  java/examples/datasets/H5Ex_D_Nbit.java | 305
-rw-r--r--  java/examples/datasets/H5Ex_D_ReadWrite.java | 179
-rw-r--r--  java/examples/datasets/H5Ex_D_Shuffle.java | 373
-rw-r--r--  java/examples/datasets/H5Ex_D_Sofloat.java | 356
-rw-r--r--  java/examples/datasets/H5Ex_D_Soint.java | 335
-rw-r--r--  java/examples/datasets/H5Ex_D_Szip.java | 337
-rw-r--r--  java/examples/datasets/H5Ex_D_Transform.java | 250
-rw-r--r--  java/examples/datasets/H5Ex_D_UnlimitedAdd.java | 393
-rw-r--r--  java/examples/datasets/H5Ex_D_UnlimitedGzip.java | 504
-rw-r--r--  java/examples/datasets/H5Ex_D_UnlimitedMod.java | 379
-rw-r--r--  java/examples/datasets/Makefile.am | 78
-rw-r--r--  java/examples/datasets/runExample.sh.in | 405
-rw-r--r--  java/examples/datatypes/CMakeLists.txt | 109
-rw-r--r--  java/examples/datatypes/H5Ex_T_Array.java | 282
-rw-r--r--  java/examples/datatypes/H5Ex_T_ArrayAttribute.java | 322
-rw-r--r--  java/examples/datatypes/H5Ex_T_Bit.java | 227
-rw-r--r--  java/examples/datatypes/H5Ex_T_BitAttribute.java | 267
-rw-r--r--  java/examples/datatypes/H5Ex_T_Commit.java | 265
-rw-r--r--  java/examples/datatypes/H5Ex_T_Compound.java | 443
-rw-r--r--  java/examples/datatypes/H5Ex_T_CompoundAttribute.java | 486
-rw-r--r--  java/examples/datatypes/H5Ex_T_Float.java | 227
-rw-r--r--  java/examples/datatypes/H5Ex_T_FloatAttribute.java | 263
-rw-r--r--  java/examples/datatypes/H5Ex_T_Integer.java | 226
-rw-r--r--  java/examples/datatypes/H5Ex_T_IntegerAttribute.java | 263
-rw-r--r--  java/examples/datatypes/H5Ex_T_ObjectReference.java | 347
-rw-r--r--  java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java | 389
-rw-r--r--  java/examples/datatypes/H5Ex_T_Opaque.java | 270
-rw-r--r--  java/examples/datatypes/H5Ex_T_OpaqueAttribute.java | 307
-rw-r--r--  java/examples/datatypes/H5Ex_T_String.java | 311
-rw-r--r--  java/examples/datatypes/H5Ex_T_StringAttribute.java | 351
-rw-r--r--  java/examples/datatypes/H5Ex_T_VLString.java | 138
-rw-r--r--  java/examples/datatypes/Makefile.am | 78
-rw-r--r--  java/examples/datatypes/runExample.sh.in | 400
-rw-r--r--  java/examples/groups/CMakeLists.txt | 138
-rw-r--r--  java/examples/groups/H5Ex_G_Compact.java | 266
-rw-r--r--  java/examples/groups/H5Ex_G_Corder.java | 121
-rw-r--r--  java/examples/groups/H5Ex_G_Create.java | 94
-rw-r--r--  java/examples/groups/H5Ex_G_Intermediate.java | 125
-rw-r--r--  java/examples/groups/H5Ex_G_Iterate.java | 119
-rw-r--r--  java/examples/groups/H5Ex_G_Phase.java | 241
-rw-r--r--  java/examples/groups/H5Ex_G_Traverse.java | 167
-rw-r--r--  java/examples/groups/H5Ex_G_Visit.java | 152
-rw-r--r--  java/examples/groups/Makefile.am | 68
-rw-r--r--  java/examples/groups/h5ex_g_iterate.h5 | bin 0 -> 2928 bytes
-rw-r--r--  java/examples/groups/h5ex_g_visit.h5 | bin 0 -> 6312 bytes
-rw-r--r--  java/examples/groups/runExample.sh.in | 341
-rw-r--r--  java/examples/intro/CMakeLists.txt | 112
-rw-r--r--  java/examples/intro/H5_CreateAttribute.java | 145
-rw-r--r--  java/examples/intro/H5_CreateDataset.java | 97
-rw-r--r--  java/examples/intro/H5_CreateFile.java | 55
-rw-r--r--  java/examples/intro/H5_CreateGroup.java | 76
-rw-r--r--  java/examples/intro/H5_CreateGroupAbsoluteRelative.java | 118
-rw-r--r--  java/examples/intro/H5_CreateGroupDataset.java | 207
-rw-r--r--  java/examples/intro/H5_ReadWrite.java | 112
-rw-r--r--  java/examples/intro/Makefile.am | 67
-rw-r--r--  java/examples/intro/runExample.sh.in | 290
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_Alloc.txt | 16
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_Checksum.txt | 3
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_Chunk.txt | 26
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_Compact.txt | 8
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_External.txt | 7
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_FillValue.txt | 20
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_Gzip.txt | 3
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_Hyperslab.txt | 24
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_Nbit.txt | 3
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_ReadWrite.txt | 6
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_Shuffle.txt | 5
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_Sofloat.txt | 6
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_Soint.txt | 3
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_Szip.txt | 3
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_Transform.txt | 15
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt | 14
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt | 16
-rw-r--r--  java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt | 14
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_Array.txt | 21
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt | 21
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_Bit.txt | 6
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_BitAttribute.txt | 6
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_Commit.txt | 6
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_Compound.txt | 25
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt | 25
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_Float.txt | 6
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt | 6
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_Integer.txt | 6
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt | 6
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReference.txt | 4
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt | 4
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_Opaque.txt | 6
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt | 6
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_String.txt | 5
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_StringAttribute.txt | 5
-rw-r--r--  java/examples/testfiles/examples.datatypes.H5Ex_T_VLString.txt | 4
-rw-r--r--  java/examples/testfiles/examples.groups.H5Ex_G_Compact.txt | 5
-rw-r--r--  java/examples/testfiles/examples.groups.H5Ex_G_Corder.txt | 10
-rw-r--r--  java/examples/testfiles/examples.groups.H5Ex_G_Create.txt | 0
-rw-r--r--  java/examples/testfiles/examples.groups.H5Ex_G_Intermediate.txt | 5
-rw-r--r--  java/examples/testfiles/examples.groups.H5Ex_G_Iterate.txt | 5
-rw-r--r--  java/examples/testfiles/examples.groups.H5Ex_G_Phase.txt | 15
-rw-r--r--  java/examples/testfiles/examples.groups.H5Ex_G_Visit.txt | 19
-rw-r--r--  java/examples/testfiles/examples.intro.H5_CreateAttribute.txt | 0
-rw-r--r--  java/examples/testfiles/examples.intro.H5_CreateDataset.txt | 0
-rw-r--r--  java/examples/testfiles/examples.intro.H5_CreateFile.txt | 0
-rw-r--r--  java/examples/testfiles/examples.intro.H5_CreateGroup.txt | 0
-rw-r--r--  java/examples/testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt | 0
-rw-r--r--  java/examples/testfiles/examples.intro.H5_CreateGroupDataset.txt | 0
-rw-r--r--  java/examples/testfiles/examples.intro.H5_ReadWrite.txt | 0
-rw-r--r--  java/lib/ext/slf4j-nop-1.7.5.jar | bin 0 -> 4091 bytes
-rw-r--r--  java/lib/ext/slf4j-simple-1.7.5.jar | bin 0 -> 10680 bytes
-rw-r--r--  java/lib/hamcrest-core.jar | bin 0 -> 45024 bytes
-rw-r--r--  java/lib/junit.jar | bin 0 -> 245039 bytes
-rw-r--r--  java/lib/simplelogger.properties | 36
-rw-r--r--  java/lib/slf4j-api-1.7.5.jar | bin 0 -> 26084 bytes
-rw-r--r--  java/src/CMakeLists.txt | 8
-rw-r--r--  java/src/Makefile.am | 137
-rw-r--r--  java/src/hdf/CMakeLists.txt | 4
-rw-r--r--  java/src/hdf/hdf5lib/CMakeLists.txt | 125
-rw-r--r--  java/src/hdf/hdf5lib/H5.java | 9184
-rw-r--r--  java/src/hdf/hdf5lib/HDF5Constants.java | 1877
-rw-r--r--  java/src/hdf/hdf5lib/HDF5GroupInfo.java | 171
-rw-r--r--  java/src/hdf/hdf5lib/HDFArray.java | 1096
-rw-r--r--  java/src/hdf/hdf5lib/HDFNativeData.java | 481
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/Callbacks.java | 33
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5A_iterate_cb.java | 23
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5A_iterate_t.java | 22
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5D_iterate_cb.java | 21
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5D_iterate_t.java | 22
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5E_walk_cb.java | 23
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5E_walk_t.java | 22
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5L_iterate_cb.java | 23
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5L_iterate_t.java | 22
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5O_iterate_cb.java | 23
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5O_iterate_t.java | 22
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_cb.java | 21
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_t.java | 22
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_cb.java | 21
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_t.java | 22
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_cls_create_func_cb.java | 21
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_cls_create_func_t.java | 22
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_iterate_cb.java | 21
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_iterate_t.java | 22
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_prp_close_func_cb.java | 21
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_prp_compare_func_cb.java | 21
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_prp_copy_func_cb.java | 21
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_prp_create_func_cb.java | 21
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_prp_delete_func_cb.java | 21
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_prp_get_func_cb.java | 21
-rw-r--r--  java/src/hdf/hdf5lib/callbacks/H5P_prp_set_func_cb.java | 21
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5AtomException.java | 44
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5AttributeException.java | 42
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5BtreeException.java | 42
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5DataFiltersException.java | 42
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5DataStorageException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5DatasetInterfaceException.java | 42
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5DataspaceInterfaceException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5DatatypeInterfaceException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5Exception.java | 68
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5ExternalFileListException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5FileInterfaceException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5FunctionArgumentException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5FunctionEntryExitException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5HeapException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5InternalErrorException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5JavaException.java | 45
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java | 383
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5LowLevelIOException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5MetaDataCacheException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5ObjectHeaderException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5PropertyListInterfaceException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5ReferenceException.java | 37
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5ResourceUnavailableException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/exceptions/HDF5SymbolTableException.java | 43
-rw-r--r--  java/src/hdf/hdf5lib/structs/H5AC_cache_config_t.java | 99
-rw-r--r--  java/src/hdf/hdf5lib/structs/H5A_info_t.java | 34
-rw-r--r--  java/src/hdf/hdf5lib/structs/H5E_error2_t.java | 41
-rw-r--r--  java/src/hdf/hdf5lib/structs/H5F_info2_t.java | 47
-rw-r--r--  java/src/hdf/hdf5lib/structs/H5G_info_t.java | 27
-rw-r--r--  java/src/hdf/hdf5lib/structs/H5L_info_t.java | 38
-rw-r--r--  java/src/hdf/hdf5lib/structs/H5O_hdr_info_t.java | 49
-rw-r--r--  java/src/hdf/hdf5lib/structs/H5O_info_t.java | 54
-rw-r--r--  java/src/hdf/hdf5lib/structs/H5_ih_info_t.java | 31
-rw-r--r--  java/src/hdf/overview.html | 96
-rw-r--r--  java/src/jni/CMakeLists.txt | 85
-rw-r--r--  java/src/jni/Makefile.am | 44
-rw-r--r--  java/src/jni/exceptionImp.c | 414
-rw-r--r--  java/src/jni/exceptionImp.h | 67
-rw-r--r--  java/src/jni/h5Constants.c | 692
-rw-r--r--  java/src/jni/h5Imp.c | 181
-rw-r--r--  java/src/jni/h5Imp.h | 95
-rw-r--r--  java/src/jni/h5aImp.c | 879
-rw-r--r--  java/src/jni/h5aImp.h | 273
-rw-r--r--  java/src/jni/h5dImp.c | 1758
-rw-r--r--  java/src/jni/h5dImp.h | 319
-rw-r--r--  java/src/jni/h5eImp.c | 524
-rw-r--r--  java/src/jni/h5eImp.h | 167
-rw-r--r--  java/src/jni/h5fImp.c | 516
-rw-r--r--  java/src/jni/h5fImp.h | 198
-rw-r--r--  java/src/jni/h5gImp.c | 257
-rw-r--r--  java/src/jni/h5gImp.h | 96
-rw-r--r--  java/src/jni/h5iImp.c | 289
-rw-r--r--  java/src/jni/h5iImp.h | 143
-rw-r--r--  java/src/jni/h5jni.h | 249
-rw-r--r--  java/src/jni/h5lImp.c | 736
-rw-r--r--  java/src/jni/h5lImp.h | 183
-rw-r--r--  java/src/jni/h5oImp.c | 766
-rw-r--r--  java/src/jni/h5oImp.h | 175
-rw-r--r--  java/src/jni/h5pImp.c | 5340
-rw-r--r--  java/src/jni/h5pImp.h | 1313
-rw-r--r--  java/src/jni/h5plImp.c | 64
-rw-r--r--  java/src/jni/h5plImp.h | 46
-rw-r--r--  java/src/jni/h5rImp.c | 324
-rw-r--r--  java/src/jni/h5rImp.h | 78
-rw-r--r--  java/src/jni/h5sImp.c | 1408
-rw-r--r--  java/src/jni/h5sImp.h | 288
-rw-r--r--  java/src/jni/h5tImp.c | 1588
-rw-r--r--  java/src/jni/h5tImp.h | 550
-rw-r--r--  java/src/jni/h5util.c | 2592
-rw-r--r--  java/src/jni/h5util.h | 108
-rw-r--r--  java/src/jni/h5zImp.c | 84
-rw-r--r--  java/src/jni/h5zImp.h | 55
-rw-r--r--  java/src/jni/nativeData.c | 1195
-rw-r--r--  java/src/jni/nativeData.h | 115
-rw-r--r--  java/test/CMakeLists.txt | 131
-rw-r--r--  java/test/JUnit-interface.ert | 2
-rw-r--r--  java/test/JUnit-interface.txt | 650
-rw-r--r--  java/test/Makefile.am | 103
-rw-r--r--  java/test/TestAll.java | 40
-rw-r--r--  java/test/TestH5.java | 257
-rw-r--r--  java/test/TestH5A.java | 1119
-rw-r--r--  java/test/TestH5D.java | 944
-rw-r--r--  java/test/TestH5Dparams.java | 134
-rw-r--r--  java/test/TestH5Dplist.java | 216
-rw-r--r--  java/test/TestH5E.java | 553
-rw-r--r--  java/test/TestH5Edefault.java | 564
-rw-r--r--  java/test/TestH5Eregister.java | 78
-rw-r--r--  java/test/TestH5F.java | 332
-rw-r--r--  java/test/TestH5Fbasic.java | 320
-rw-r--r--  java/test/TestH5Fparams.java | 215
-rw-r--r--  java/test/TestH5G.java | 505
-rw-r--r--  java/test/TestH5Gbasic.java | 371
-rw-r--r--  java/test/TestH5Giterate.java | 136
-rw-r--r--  java/test/TestH5Lbasic.java | 371
-rw-r--r--  java/test/TestH5Lcreate.java | 819
-rw-r--r--  java/test/TestH5Lparams.java | 231
-rw-r--r--  java/test/TestH5Obasic.java | 488
-rw-r--r--  java/test/TestH5Ocopy.java | 365
-rw-r--r--  java/test/TestH5Ocreate.java | 562
-rw-r--r--  java/test/TestH5Oparams.java | 154
-rw-r--r--  java/test/TestH5P.java | 1222
-rw-r--r--  java/test/TestH5PData.java | 170
-rw-r--r--  java/test/TestH5PL.java | 61
-rw-r--r--  java/test/TestH5Pfapl.java | 1325
-rw-r--r--  java/test/TestH5Plist.java | 1013
-rw-r--r--  java/test/TestH5Pvirtual.java | 433
-rw-r--r--  java/test/TestH5R.java | 335
-rw-r--r--  java/test/TestH5S.java | 590
-rw-r--r--  java/test/TestH5Sbasic.java | 247
-rw-r--r--  java/test/TestH5T.java | 459
-rw-r--r--  java/test/TestH5Tbasic.java | 161
-rw-r--r--  java/test/TestH5Tparams.java | 389
-rw-r--r--  java/test/TestH5Z.java | 100
-rw-r--r--  java/test/h5ex_g_iterate.orig | bin 0 -> 2928 bytes
-rw-r--r--  java/test/junit.sh.in | 255
276 files changed, 72368 insertions(+), 0 deletions(-)
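
The diff below adds the JNI layer (java/src/jni), the Java class library (java/src/hdf/hdf5lib), JUnit tests, and a set of runnable examples. Every example follows the same skeleton: create a file, a dataspace, and a dataset, write the data, and close each identifier. The following condensed sketch of that skeleton is editorial, not part of the patch; the class and file names are illustrative, it uses only H5 and HDF5Constants calls that appear verbatim in the examples, and the per-call try/catch blocks of the real examples are omitted.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class MinimalWriteSketch {
    public static void main(String[] args) throws Exception {
        long[] dims = { 4, 7 };
        int[][] data = new int[4][7];

        // Create the file, a simple 2-D dataspace, and an integer dataset.
        long file_id = H5.H5Fcreate("sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        long space_id = H5.H5Screate_simple(2, dims, null);
        long dset_id = H5.H5Dcreate(file_id, "DS1", HDF5Constants.H5T_NATIVE_INT, space_id,
                HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);

        // Write the whole array, then close identifiers in reverse order of creation.
        H5.H5Dwrite(dset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, data);
        H5.H5Dclose(dset_id);
        H5.H5Sclose(space_id);
        H5.H5Fclose(file_id);
    }
}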
diff --git a/java/CMakeLists.txt b/java/CMakeLists.txt
new file mode 100644
index 0000000..0440d41
--- /dev/null
+++ b/java/CMakeLists.txt
@@ -0,0 +1,77 @@
+cmake_minimum_required(VERSION 3.1.0)
+PROJECT ( HDF5_JAVA C Java )
+
+set (CMAKE_MODULE_PATH "${HDF_RESOURCES_DIR};${HDF_RESOURCES_EXT_DIR}")
+find_package (Java)
+#-----------------------------------------------------------------------------
+# Include some macros for reusable code
+#-----------------------------------------------------------------------------
+include (${HDF_RESOURCES_DIR}/UseJava.cmake)
+
+message (STATUS "JAVA: JAVA_HOME=$ENV{JAVA_HOME} JAVA_ROOT=$ENV{JAVA_ROOT}")
+find_package (JNI)
+
+INCLUDE_DIRECTORIES ( ${JNI_INCLUDE_DIRS} )
+
+#-----------------------------------------------------------------------------
+# Include the main src and config directories
+#-----------------------------------------------------------------------------
+set (HDF5_JAVA_INCLUDE_DIRECTORIES
+ ${HDF5_JAVA_JNI_SRC_DIR}
+ ${JAVA_INCLUDE_PATH}
+ ${JAVA_INCLUDE_PATH2}
+)
+INCLUDE_DIRECTORIES (${HDF5_JAVA_INCLUDE_DIRECTORIES})
+set (CMAKE_JAVA_INCLUDE_PATH "")
+
+
+#-----------------------------------------------------------------------------
+# Traverse source subdirectory
+#-----------------------------------------------------------------------------
+add_subdirectory (${HDF5_JAVA_SOURCE_DIR}/src ${HDF5_JAVA_BINARY_DIR}/src)
+
+#-----------------------------------------------------------------------------
+# Build the Java Examples
+#-----------------------------------------------------------------------------
+if (HDF5_BUILD_EXAMPLES)
+ add_subdirectory (${HDF5_JAVA_SOURCE_DIR}/examples ${HDF5_JAVA_BINARY_DIR}/examples)
+endif (HDF5_BUILD_EXAMPLES)
+
+#-----------------------------------------------------------------------------
+# Testing
+#-----------------------------------------------------------------------------
+if (BUILD_TESTING)
+ add_subdirectory (${HDF5_JAVA_SOURCE_DIR}/test ${HDF5_JAVA_BINARY_DIR}/test)
+endif (BUILD_TESTING)
+
+#-----------------------------------------------------------------------------
+# Add Required Jar(s)
+#-----------------------------------------------------------------------------
+install (
+ FILES
+ ${HDF5_JAVA_LOGGING_JAR}
+ ${HDF5_JAVA_LOGGING_NOP_JAR}
+ ${HDF5_JAVA_LOGGING_SIMPLE_JAR}
+ DESTINATION ${HDF5_INSTALL_JAR_DIR}
+ COMPONENT libraries
+)
+
+#-----------------------------------------------------------------------------
+# Option to include jre
+#-----------------------------------------------------------------------------
+option (HDF5_JAVA_PACK_JRE "Package a JRE installer directory" OFF)
+if (HDF5_JAVA_PACK_JRE)
+ if (WIN32)
+ install (
+ DIRECTORY "C:/Program Files/Java/jre8"
+ DESTINATION ${HDF5_INSTALL_BIN_DIR}
+ USE_SOURCE_PERMISSIONS
+ )
+ else (WIN32)
+ install (
+ DIRECTORY "/usr/lib/jvm/jre"
+ DESTINATION ${HDF5_INSTALL_BIN_DIR}
+ USE_SOURCE_PERMISSIONS
+ )
+ endif (WIN32)
+endif (HDF5_JAVA_PACK_JRE)
diff --git a/java/COPYING b/java/COPYING
new file mode 100644
index 0000000..6903daf
--- /dev/null
+++ b/java/COPYING
@@ -0,0 +1,16 @@
+
+ Copyright by The HDF Group and
+ The Board of Trustees of the University of Illinois.
+ All rights reserved.
+
+ The files and subdirectories in this directory are part of HDF5.
+ The full HDF5 copyright notice, including terms governing use,
+ modification, and redistribution, is contained in the files COPYING
+ and Copyright.html. COPYING can be found at the root of the source
+ code distribution tree; Copyright.html can be found at the root
+ level of an installed copy of the electronic HDF5 document set and
+ is linked from the top-level documents page. It can also be found
+ at http://www.hdfgroup.org/HDF5/doc/Copyright.html. If you do not
+ have access to either file, you may request a copy from
+ help@hdfgroup.org.
+
diff --git a/java/Makefile.am b/java/Makefile.am
new file mode 100644
index 0000000..10b1e91
--- /dev/null
+++ b/java/Makefile.am
@@ -0,0 +1,38 @@
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+#
+# This makefile mostly just reinvokes make in the various subdirectories
+# but does so in the correct order. You can alternatively invoke make from
+# each subdirectory manually.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+# HDF5 Java native interface (JNI) Library Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+## Only recurse into subdirectories if the Java (JNI) interface is enabled.
+if BUILD_JAVA_CONDITIONAL
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+SUBDIRS=src test examples
+
+endif
+
+include $(top_srcdir)/config/conclude.am
diff --git a/java/examples/CMakeLists.txt b/java/examples/CMakeLists.txt
new file mode 100644
index 0000000..0430bdb
--- /dev/null
+++ b/java/examples/CMakeLists.txt
@@ -0,0 +1,7 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDFJAVA_EXAMPLES)
+
+add_subdirectory (${HDFJAVA_EXAMPLES_SOURCE_DIR}/datasets datasets)
+add_subdirectory (${HDFJAVA_EXAMPLES_SOURCE_DIR}/datatypes datatypes)
+add_subdirectory (${HDFJAVA_EXAMPLES_SOURCE_DIR}/groups groups)
+add_subdirectory (${HDFJAVA_EXAMPLES_SOURCE_DIR}/intro intro)
diff --git a/java/examples/Makefile.am b/java/examples/Makefile.am
new file mode 100644
index 0000000..e685e01
--- /dev/null
+++ b/java/examples/Makefile.am
@@ -0,0 +1,31 @@
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+#
+# This makefile mostly just reinvokes make in the various subdirectories
+# but does so in the correct order. You can alternatively invoke make from
+# each subdirectory manually.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+# HDF5 Java Library Examples Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+## Only recurse into subdirectories if the Java (JNI) interface is enabled.
+ SUBDIRS=intro groups datasets datatypes
+
+include $(top_srcdir)/config/conclude.am
diff --git a/java/examples/datasets/CMakeLists.txt b/java/examples/datasets/CMakeLists.txt
new file mode 100644
index 0000000..077c6bb
--- /dev/null
+++ b/java/examples/datasets/CMakeLists.txt
@@ -0,0 +1,138 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDFJAVA_EXAMPLES_DATASETS Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDF5_JAVA_JNI_BINARY_DIR}
+ ${HDF5_JAVA_HDF5_LIB_DIR}
+)
+
+set (HDF_JAVA_EXAMPLES
+ H5Ex_D_Alloc
+ H5Ex_D_Checksum
+ H5Ex_D_Chunk
+ H5Ex_D_Compact
+ H5Ex_D_External
+ H5Ex_D_FillValue
+ H5Ex_D_Gzip
+ H5Ex_D_Hyperslab
+ H5Ex_D_ReadWrite
+ H5Ex_D_Shuffle
+ H5Ex_D_Szip
+ H5Ex_D_UnlimitedAdd
+ H5Ex_D_UnlimitedGzip
+ H5Ex_D_UnlimitedMod
+ H5Ex_D_Nbit
+ H5Ex_D_Transform
+ H5Ex_D_Sofloat
+ H5Ex_D_Soint
+)
+
+if (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+else (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+endif (WIN32)
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS}")
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
+endforeach (CMAKE_INCLUDE_PATH)
+
+foreach (example ${HDF_JAVA_EXAMPLES})
+ file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt
+ "Main-Class: examples.datasets.${example}
+"
+ )
+ add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java)
+ get_target_property (${example}_JAR_FILE ${example} JAR_FILE)
+# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
+ get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
+ add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
+endforeach (example ${HDF_JAVA_EXAMPLES})
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (HDFJAVA_JAR ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${HDFJAVA_JAR}")
+endforeach (HDFJAVA_JAR)
+
+MACRO (ADD_H5_TEST resultfile resultcode)
+ add_test (
+ NAME JAVA_datasets-${resultfile}
+ COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
+ -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${resultfile}_JAR_FILE}"
+ -D "TEST_ARGS:STRING=${ARGN}"
+ -D "TEST_PROGRAM=examples.datasets.${resultfile}"
+ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}"
+ -D "TEST_FOLDER=${HDFJAVA_EXAMPLES_BINARY_DIR}"
+ -D "TEST_OUTPUT=datasets/${resultfile}.out"
+ -D "TEST_EXPECT=${resultcode}"
+ -D "TEST_REFERENCE=datasets/${resultfile}.txt"
+ -P "${HDF_RESOURCES_DIR}/jrunTest.cmake"
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (JAVA_datasets-${resultfile} PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "JAVA_datasets-${resultfile}")
+ENDMACRO (ADD_H5_TEST file)
+
+if (BUILD_TESTING)
+
+# detect whether the encoder is present.
+ if (H5_HAVE_FILTER_DEFLATE)
+ set (USE_FILTER_DEFLATE "true")
+ endif (H5_HAVE_FILTER_DEFLATE)
+
+ if (H5_HAVE_FILTER_SZIP)
+ set (USE_FILTER_SZIP "true")
+ endif (H5_HAVE_FILTER_SZIP)
+
+ foreach (example ${HDF_JAVA_EXAMPLES})
+ if (${example} STREQUAL "H5Ex_D_External")
+ add_test (
+ NAME JAVA_datasets-${example}-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
+ ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.data
+ ${example}.out
+ ${example}.out.err
+ )
+ else (${example} STREQUAL "H5Ex_D_External")
+ add_test (
+ NAME JAVA_datasets-${example}-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
+ ${example}.out
+ ${example}.out.err
+ )
+ endif (${example} STREQUAL "H5Ex_D_External")
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (JAVA_datasets-${example}-clearall-objects PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ add_test (
+ NAME JAVA_datasets-${example}-copy-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E copy_if_different
+ ${HDFJAVA_EXAMPLES_SOURCE_DIR}/testfiles/examples.datasets.${example}.txt
+ ${HDFJAVA_EXAMPLES_DATASETS_BINARY_DIR}/${example}.txt
+ )
+ set_tests_properties (JAVA_datasets-${example}-copy-objects PROPERTIES DEPENDS JAVA_datasets-${example}-clearall-objects)
+ set (last_test "JAVA_datasets-${example}-copy-objects")
+ if (${example} STREQUAL "H5Ex_D_Szip")
+ if (USE_FILTER_SZIP)
+ ADD_H5_TEST (${example} 0)
+ endif (USE_FILTER_SZIP)
+ else (${example} STREQUAL "H5Ex_D_Szip")
+ ADD_H5_TEST (${example} 0)
+ endif (${example} STREQUAL "H5Ex_D_Szip")
+
+ endforeach (example ${HDF_JAVA_EXAMPLES})
+endif (BUILD_TESTING)
diff --git a/java/examples/datasets/H5Ex_D_Alloc.java b/java/examples/datasets/H5Ex_D_Alloc.java
new file mode 100644
index 0000000..69fee38
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Alloc.java
@@ -0,0 +1,301 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to set the space allocation time
+ for a dataset. The program first creates two datasets,
+ one with the default allocation time (late) and one with
+ early allocation time, and displays whether each has been
+ allocated and their allocation size. Next, it writes data
+ to the datasets, and again displays whether each has been
+ allocated and their allocation size.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Alloc {
+ private static String FILENAME = "H5Ex_D_Alloc.h5";
+ private static String DATASETNAME1 = "DS1";
+ private static String DATASETNAME2 = "DS2";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int FILLVAL = 99;
+ private static final int RANK = 2;
+
+ // Values for the status of space allocation
+ enum H5D_space_status {
+ H5D_SPACE_STATUS_ERROR(-1), H5D_SPACE_STATUS_NOT_ALLOCATED(0), H5D_SPACE_STATUS_PART_ALLOCATED(1), H5D_SPACE_STATUS_ALLOCATED(
+ 2);
+ private static final Map<Integer, H5D_space_status> lookup = new HashMap<Integer, H5D_space_status>();
+
+ static {
+ for (H5D_space_status s : EnumSet.allOf(H5D_space_status.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5D_space_status(int space_status) {
+ this.code = space_status;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5D_space_status get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static void allocation() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id1 = -1;
+ long dataset_id2 = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+ int space_status = 0;
+ long storage_size = 0;
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = FILLVAL;
+
+ // Create a file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, and set the chunk size.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the allocation time to "early". This way we can be sure
+ // that reading from the dataset immediately after creation will
+ // return the fill value.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ System.out.println("Creating datasets...");
+ System.out.println(DATASETNAME1 + " has allocation time H5D_ALLOC_TIME_LATE");
+ System.out.println(DATASETNAME2 + " has allocation time H5D_ALLOC_TIME_EARLY");
+ System.out.println();
+
+ // Create the dataset using the dataset default creation property list.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id1 = H5.H5Dcreate(file_id, DATASETNAME1, HDF5Constants.H5T_NATIVE_INT, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset using the dataset creation property list.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id2 = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_NATIVE_INT, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print space status and storage size for dset1.
+ try {
+ if (dataset_id1 >= 0)
+ space_status = H5.H5Dget_space_status(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id1 >= 0)
+ storage_size = H5.H5Dget_storage_size(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ String the_space = " ";
+ if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+ the_space += "not ";
+ System.out.println("Space for " + DATASETNAME1 + " has" + the_space + "been allocated.");
+ System.out.println("Storage size for " + DATASETNAME1 + " is: " + storage_size + " bytes.");
+
+ // Retrieve and print space status and storage size for dset2.
+ try {
+ if (dataset_id2 >= 0)
+ space_status = H5.H5Dget_space_status(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id2 >= 0)
+ storage_size = H5.H5Dget_storage_size(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ the_space = " ";
+ if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+ the_space += "not ";
+ System.out.println("Space for " + DATASETNAME2 + " has" + the_space + "been allocated.");
+ System.out.println("Storage size for " + DATASETNAME2 + " is: " + storage_size + " bytes.");
+ System.out.println();
+
+ System.out.println("Writing data...");
+ System.out.println();
+
+ // Write the data to the datasets.
+ try {
+ if (dataset_id1 >= 0)
+ H5.H5Dwrite(dataset_id1, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data[0]);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id2 >= 0)
+ H5.H5Dwrite(dataset_id2, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data[0]);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print space status and storage size for dset1.
+ try {
+ if (dataset_id1 >= 0)
+ space_status = H5.H5Dget_space_status(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id1 >= 0)
+ storage_size = H5.H5Dget_storage_size(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ the_space = " ";
+ if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+ the_space += "not ";
+ System.out.println("Space for " + DATASETNAME1 + " has" + the_space + "been allocated.");
+ System.out.println("Storage size for " + DATASETNAME1 + " is: " + storage_size + " bytes.");
+
+ // Retrieve and print space status and storage size for dset2.
+ try {
+ if (dataset_id2 >= 0)
+ space_status = H5.H5Dget_space_status(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id2 >= 0)
+ storage_size = H5.H5Dget_storage_size(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ the_space = " ";
+ if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+ the_space += "not ";
+ System.out.println("Space for " + DATASETNAME2 + " has" + the_space + "been allocated.");
+ System.out.println("Storage size for " + DATASETNAME2 + " is: " + storage_size + " bytes.");
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id1 >= 0)
+ H5.H5Dclose(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id2 >= 0)
+ H5.H5Dclose(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_Alloc.allocation();
+ }
+
+}
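
Stripped of its per-call try/catch blocks, the heart of H5Ex_D_Alloc is the dataset creation property list: H5Pset_alloc_time selects early allocation, and H5Dget_space_status and H5Dget_storage_size report the result. The helper below is an editorial sketch built only from calls used in the example above; the class and method names are hypothetical, error handling is omitted, and an already-open file_id and space_id are assumed.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

class AllocTimeSketch {
    // Create "name" with early space allocation and report its allocation state.
    static long createEarly(long file_id, long space_id, String name) throws Exception {
        long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
        H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
        long dset_id = H5.H5Dcreate(file_id, name, HDF5Constants.H5T_NATIVE_INT, space_id,
                HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
        // With early allocation the storage exists before any H5Dwrite call.
        int status = H5.H5Dget_space_status(dset_id);
        long size = H5.H5Dget_storage_size(dset_id);
        System.out.println(name + ": space status " + status + ", " + size + " bytes allocated");
        H5.H5Pclose(dcpl_id);
        return dset_id; // caller closes the dataset
    }
}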
diff --git a/java/examples/datasets/H5Ex_D_Checksum.java b/java/examples/datasets/H5Ex_D_Checksum.java
new file mode 100644
index 0000000..3a2f98f
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Checksum.java
@@ -0,0 +1,347 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the Fletcher32 checksum filter. The program first
+ checks if the Fletcher32 filter is available, then if it
+ is it writes integers to a dataset using Fletcher32, then
+ closes the file. Next, it reopens the file, reads back
+ the data, checks if the filter detected an error and
+ outputs the type of filter and the maximum value in the
+ dataset to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Checksum {
+ private static String FILENAME = "H5Ex_D_Checksum.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Values for the status of space allocation
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(-1), H5Z_FILTER_NONE(0), H5Z_FILTER_DEFLATE(1), H5Z_FILTER_SHUFFLE(2), H5Z_FILTER_FLETCHER32(3), H5Z_FILTER_SZIP(
+ 4), H5Z_FILTER_NBIT(5), H5Z_FILTER_SCALEOFFSET(6), H5Z_FILTER_RESERVED(256), H5Z_FILTER_MAX(65535);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkFletcher32Filter() {
+ try {
+ int available = H5.H5Zfilter_avail(H5Z_filter.H5Z_FILTER_FLETCHER32.getCode());
+ if (available == 0) {
+ System.out.println("Fletcher32 filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_FLETCHER32);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("Fletcher32 filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeChecksum() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the Fletcher32 filter.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_fletcher32(dcpl_id);
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readChecksum() {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
+ filter_type = H5
+ .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ int status = H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ // Check if the read was successful. Normally we do not perform
+ // error checking in these examples for the sake of clarity, but in
+ // this case we will make an exception because this is how the
+ // fletcher32 checksum filter reports data errors.
+ if (status < 0) {
+ System.out.print("Dataset read failed!");
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return;
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ // Check if the Fletcher32 filter is available and can be used for
+ // both encoding and decoding. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+ // optional part of the hdf5 library.
+ if (H5Ex_D_Checksum.checkFletcher32Filter()) {
+ H5Ex_D_Checksum.writeChecksum();
+ H5Ex_D_Checksum.readChecksum();
+ }
+ }
+
+}
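
One Java-specific detail worth noting in H5Ex_D_Checksum is the H5Pget_filter calling convention: where the C API uses pointer out-parameters, the Java binding fills caller-supplied one-element arrays, which is why the example insists on a valid filter_name object and cd_values. The helper below is a condensed editorial sketch of just that call, with the same arguments the example uses; the class and method names are hypothetical and error handling is omitted.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

class FilterQuerySketch {
    // Report the first filter recorded in a dataset creation property list.
    static void printFirstFilter(long dcpl_id) throws Exception {
        // The Java binding returns values through these caller-supplied arrays.
        int[] flags = { 0 };
        long[] cd_nelmts = { 1 };
        int[] cd_values = { 0 };
        String[] filter_name = { "" };
        int[] filter_config = { 0 };
        int filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values,
                120, filter_name, filter_config);
        if (filter_type == HDF5Constants.H5Z_FILTER_FLETCHER32)
            System.out.println("First filter: H5Z_FILTER_FLETCHER32");
        else
            System.out.println("First filter id: " + filter_type);
    }
}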
diff --git a/java/examples/datasets/H5Ex_D_Chunk.java b/java/examples/datasets/H5Ex_D_Chunk.java
new file mode 100644
index 0000000..7f02e5a
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Chunk.java
@@ -0,0 +1,366 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create a chunked dataset. The
+ program first writes integers in a hyperslab selection to
+ a chunked dataset with dataspace dimensions of DIM_XxDIM_Y
+ and chunk size of CHUNK_XxCHUNK_Y, then closes the file.
+ Next, it reopens the file, reads back the data, and
+ outputs it to the screen. Finally it reads the data again
+ using a different hyperslab selection, and outputs
+ the result to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Chunk {
+ private static String FILENAME = "H5Ex_D_Chunk.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 6;
+ private static final int DIM_Y = 8;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Values for the status of space allocation
+ enum H5D_layout {
+ H5D_LAYOUT_ERROR(-1), H5D_COMPACT(0), H5D_CONTIGUOUS(1), H5D_CHUNKED(2), H5D_NLAYOUTS(3);
+ private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();
+
+ static {
+ for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5D_layout(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5D_layout get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static void writeChunk() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data to "1", to make it easier to see the selections.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = 1;
+
+ // Print the data to the screen.
+ System.out.println("Original Data:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the chunk size.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the chunked dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Define and select the first part of the hyperslab selection.
+ long[] start = { 0, 0 };
+ long[] stride = { 3, 3 };
+ long[] count = { 2, 3 };
+ long[] block = { 2, 2 };
+ try {
+ if ((filespace_id >= 0))
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Define and select the second part of the hyperslab selection,
+ // which is subtracted from the first selection by the use of
+ // H5S_SELECT_NOTB
+ block[0] = 1;
+ block[1] = 1;
+ try {
+ if ((filespace_id >= 0)) {
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count, block);
+
+ // Write the data to the dataset.
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readChunk() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Print the storage layout.
+ try {
+ if (dcpl_id >= 0) {
+ int layout_type = H5.H5Pget_layout(dcpl_id);
+ System.out.print("Storage layout for " + DATASETNAME + " is: ");
+ switch (H5D_layout.get(layout_type)) {
+ case H5D_COMPACT:
+ System.out.println("H5D_COMPACT");
+ break;
+ case H5D_CONTIGUOUS:
+ System.out.println("H5D_CONTIGUOUS");
+ break;
+ case H5D_CHUNKED:
+ System.out.println("H5D_CHUNKED");
+ break;
+ case H5D_LAYOUT_ERROR:
+ break;
+ case H5D_NLAYOUTS:
+ break;
+ default:
+ break;
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as written to disk by hyperslabs:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Initialize the read array.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = 0;
+
+ // Define and select the hyperslab to use for reading.
+ try {
+ if (dataset_id >= 0) {
+ filespace_id = H5.H5Dget_space(dataset_id);
+
+ long[] start = { 0, 1 };
+ long[] stride = { 4, 4 };
+ long[] count = { 2, 2 };
+ long[] block = { 2, 3 };
+
+ if (filespace_id >= 0) {
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+
+ // Read the data using the previously defined hyperslab.
+ if ((dataset_id >= 0) && (filespace_id >= 0))
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as read from disk by hyperslab:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_Chunk.writeChunk();
+ H5Ex_D_Chunk.readChunk();
+ }
+
+}
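
The selection logic in H5Ex_D_Chunk is easy to lose among the try/catch blocks: a pattern of 2x2 blocks is selected with H5S_SELECT_SET, a 1x1 hyperslab at the same starting points is then subtracted with H5S_SELECT_NOTB, and the write targets the combined selection by passing the file dataspace to H5Dwrite. The helper below is a condensed editorial sketch using only those calls; the class and method names are hypothetical, error handling is omitted, and data must match the dataset's dimensions (6x8 in the example).

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

class HyperslabSketch {
    // Write only the selected elements: 2x2 blocks on a 3x3 stride,
    // each with its upper-left element removed via H5S_SELECT_NOTB.
    static void writePattern(long dset_id, long space_id, int[][] data) throws Exception {
        long[] start = { 0, 0 };
        long[] stride = { 3, 3 };
        long[] count = { 2, 3 };
        long[] block = { 2, 2 };
        H5.H5Sselect_hyperslab(space_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
        // Shrinking the block to 1x1 and subtracting removes the first element of each block.
        block[0] = 1;
        block[1] = 1;
        H5.H5Sselect_hyperslab(space_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count, block);
        // H5S_ALL for the memory space reuses the file selection, as in the example above.
        H5.H5Dwrite(dset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, space_id,
                HDF5Constants.H5P_DEFAULT, data);
    }
}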
diff --git a/java/examples/datasets/H5Ex_D_Compact.java b/java/examples/datasets/H5Ex_D_Compact.java
new file mode 100644
index 0000000..4f1e2f0
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Compact.java
@@ -0,0 +1,289 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a compact
+ dataset. The program first writes integers to a compact
+ dataset with dataspace dimensions of DIM_XxDIM_Y, then
+ closes the file. Next, it reopens the file, reads back
+ the data, and outputs it to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Compact {
+ private static String FILENAME = "H5Ex_D_Compact.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int RANK = 2;
+
+ // Values for the dataset storage layout
+ enum H5D_layout {
+ H5D_LAYOUT_ERROR(-1), H5D_COMPACT(0), H5D_CONTIGUOUS(1), H5D_CHUNKED(2), H5D_NLAYOUTS(3);
+ private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();
+
+ static {
+ for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5D_layout(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5D_layout get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static void writeCompact() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the layout to compact.
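+ // Compact storage keeps the raw data in the dataset's object header, so it
+ // is only suitable for small datasets (the data must fit in 64 KB).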
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_layout(dcpl_id, H5D_layout.H5D_COMPACT.getCode());
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset. We will use all default properties for this example.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readCompact() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open file and dataset using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Print the storage layout.
+ try {
+ if (dcpl_id >= 0) {
+ int layout_type = H5.H5Pget_layout(dcpl_id);
+ System.out.print("Storage layout for " + DATASETNAME + " is: ");
+ switch (H5D_layout.get(layout_type)) {
+ case H5D_COMPACT:
+ System.out.println("H5D_COMPACT");
+ break;
+ case H5D_CONTIGUOUS:
+ System.out.println("H5D_CONTIGUOUS");
+ break;
+ case H5D_CHUNKED:
+ System.out.println("H5D_CHUNKED");
+ break;
+ case H5D_LAYOUT_ERROR:
+ break;
+ case H5D_NLAYOUTS:
+ break;
+ default:
+ break;
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data for " + DATASETNAME + " is: ");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_Compact.writeCompact();
+ H5Ex_D_Compact.readCompact();
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_External.java b/java/examples/datasets/H5Ex_D_External.java
new file mode 100644
index 0000000..5fdc696
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_External.java
@@ -0,0 +1,238 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to an
+ external dataset. The program first writes integers to an
+ external dataset with dataspace dimensions of DIM_XxDIM_Y,
+ then closes the file. Next, it reopens the file, reads
+ back the data, and outputs the name of the external data
+ file and the data to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_External {
+ private static String FILENAME = "H5Ex_D_External.h5";
+ private static String EXTERNALNAME = "H5Ex_D_External.data";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int RANK = 2;
+ private static final int NAME_BUF_SIZE = 32;
+
+ private static void writeExternal() {
+ long file_id = -1;
+ long dcpl_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the external storage for the dataset.
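+ // H5Pset_external supplies the external file name, the byte offset at which
+ // the data starts in that file, and the space reserved for it; H5F_UNLIMITED
+ // allows the external data file to grow as needed.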
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_external(dcpl_id, EXTERNALNAME, 0, HDF5Constants.H5F_UNLIMITED);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void readExternal() {
+ long file_id = -1;
+ long dcpl_id = -1;
+ long dataset_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+ String[] Xname = new String[1];
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the name of the external file.
+ long[] Xsize = new long[NAME_BUF_SIZE];
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pget_external(dcpl_id, 0, Xsize.length, Xname, Xsize);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ System.out.println(DATASETNAME + " is stored in file: " + Xname[0]);
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println(DATASETNAME + ":");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_External.writeExternal();
+ H5Ex_D_External.readExternal();
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_FillValue.java b/java/examples/datasets/H5Ex_D_FillValue.java
new file mode 100644
index 0000000..982d2cb
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_FillValue.java
@@ -0,0 +1,246 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to set the fill value for a
+ dataset. The program first sets the fill value to
+ FILLVAL, creates a dataset with dimensions of DIM_XxDIM_Y,
+ reads from the uninitialized dataset, and outputs the
+ contents to the screen. Next, it writes integers to the
+ dataset, reads the data back, and outputs it to the
+ screen. Finally it extends the dataset, reads from it,
+ and outputs the result to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_FillValue {
+ private static String FILENAME = "H5Ex_D_FillValue.h5";
+ private static String DATASETNAME = "ExtendibleArray";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+ private static final int FILLVAL = 99;
+
+ private static void fillValue() {
+ long file_id = -1;
+ long dcpl_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] extdims = { EDIM_X, EDIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+ int[][] write_dset_data = new int[DIM_X][DIM_Y];
+ int[][] read_dset_data = new int[DIM_X][DIM_Y];
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ write_dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace with unlimited dimensions.
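+ // Unlimited maximum dimensions require chunked storage and allow the
+ // dataset to be extended later with H5Dset_extent.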
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the chunk size.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the fill value for the dataset
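+ // Elements that have not been written yet, including those added later by
+ // H5Dset_extent, read back as this fill value.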
+ try {
+ int[] fill_value = { FILLVAL };
+ if (dcpl_id >= 0)
+ H5.H5Pset_fill_value(dcpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the allocation time to "early". This way we can be sure
+ // that reading from the dataset immediately after creation will
+ // return the fill value.
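+ // H5D_ALLOC_TIME_EARLY allocates all of the dataset's storage at creation
+ // time instead of deferring allocation until data is written.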
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset using the dataset creation property list.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read values from the dataset, which has not been written to yet.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, read_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset before being written to:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(read_dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, write_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data back.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, read_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after being written to:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(read_dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Extend the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dset_extent(dataset_id, extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read from the extended dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after extension:");
+ for (int indx = 0; indx < EDIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < EDIM_Y; jndx++)
+ System.out.print(extend_dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_FillValue.fillValue();
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_Gzip.java b/java/examples/datasets/H5Ex_D_Gzip.java
new file mode 100644
index 0000000..b813367
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Gzip.java
@@ -0,0 +1,336 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using gzip compression (also called zlib or deflate). The
+ program first checks if gzip compression is available,
+ then, if it is, writes integers to a dataset using gzip,
+ then closes the file. Next, it reopens the file, reads
+ back the data, and outputs the type of compression and the
+ maximum value in the dataset to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Gzip {
+ private static String FILENAME = "H5Ex_D_Gzip.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Values identifying the HDF5 filters
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkGzipFilter() {
+ try {
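+ // H5Zfilter_avail returns a positive value when the filter is configured
+ // into the library and 0 when it is not.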
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (available == 0) {
+ System.out.println("gzip filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("gzip filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeGzip() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the gzip compression
+ // filter.
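+ // The deflate (gzip) filter only applies to chunked datasets, and its
+ // compression level ranges from 0 (none) to 9 (maximum, slowest).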
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_deflate(dcpl_id, 9);
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readGzip() {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
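+ // H5Pget_filter returns the identifier of the filter at the given index and
+ // fills in its flags, client data values, name, and configuration.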
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
+ filter_type = H5
+ .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ // Check if gzip compression is available and can be used for both
+ // compression and decompression. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+ // optional part of the HDF5 library.
+ if (H5Ex_D_Gzip.checkGzipFilter()) {
+ H5Ex_D_Gzip.writeGzip();
+ H5Ex_D_Gzip.readGzip();
+ }
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_Hyperslab.java b/java/examples/datasets/H5Ex_D_Hyperslab.java
new file mode 100644
index 0000000..482e2c0
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Hyperslab.java
@@ -0,0 +1,269 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a
+ dataset by hyperslabs. The program first writes integers
+ in a hyperslab selection to a dataset with dataspace
+ dimensions of DIM_XxDIM_Y, then closes the file. Next, it
+ reopens the file, reads back the data, and outputs it to
+ the screen. Finally it reads the data again using a
+ different hyperslab selection, and outputs the result to
+ the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Hyperslab {
+ private static String FILENAME = "H5Ex_D_Hyperslab.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 6;
+ private static final int DIM_Y = 8;
+ private static final int RANK = 2;
+
+ private static void writeHyperslab() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data to "1", to make it easier to see the selections.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = 1;
+
+ // Print the data to the screen.
+ System.out.println("Original Data:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset. We will use all default properties for this example.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Define and select the first part of the hyperslab selection.
+ long[] start = { 0, 0 };
+ long[] stride = { 3, 3 };
+ long[] count = { 2, 3 };
+ long[] block = { 2, 2 };
+ try {
+ if ((filespace_id >= 0))
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Define and select the second part of the hyperslab selection,
+ // which is subtracted from the first selection by the use of
+ // H5S_SELECT_NOTB
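+ // The first selection covers six 2x2 blocks (count {2,3} blocks of size
+ // {2,2} at stride {3,3}); subtracting the matching 1x1 blocks removes the
+ // top-left element of each block, leaving an L-shaped selection of three
+ // elements per block.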
+ block[0] = 1;
+ block[1] = 1;
+ try {
+ if ((filespace_id >= 0)) {
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count, block);
+
+ // Write the data to the dataset.
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readHyperslab() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as written to disk by hyberslabs:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Initialize the read array.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = 0;
+
+ // Define and select the hyperslab to use for reading.
+ try {
+ if (dataset_id >= 0) {
+ filespace_id = H5.H5Dget_space(dataset_id);
+
+ long[] start = { 0, 1 };
+ long[] stride = { 4, 4 };
+ long[] count = { 2, 2 };
+ long[] block = { 2, 3 };
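+ // This selects two 2x3 blocks along each dimension: rows 0-1 and 4-5,
+ // columns 1-3 and 5-7.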
+
+ if (filespace_id >= 0) {
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+
+ // Read the data using the previously defined hyperslab.
+ if ((dataset_id >= 0) && (filespace_id >= 0))
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as read from disk by hyberslab:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_Hyperslab.writeHyperslab();
+ H5Ex_D_Hyperslab.readHyperslab();
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_Nbit.java b/java/examples/datasets/H5Ex_D_Nbit.java
new file mode 100644
index 0000000..f74b675
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Nbit.java
@@ -0,0 +1,305 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the N-Bit filter. The program first checks if the
+ N-Bit filter is available, then, if it is, writes integers
+ to a dataset using N-Bit, then closes the file. Next, it
+ reopens the file, reads back the data, and outputs the type
+ of filter and the maximum value in the dataset to the screen.
+ ************************************************************/
+
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Nbit {
+ private static String FILENAME = "H5Ex_D_Nbit.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Values identifying the HDF5 filters
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkNbitFilter() {
+ try {
+ // Check if N-Bit compression is available and can be used for both compression and decompression.
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_NBIT);
+ if (available == 0) {
+ System.out.println("N-Bit filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_NBIT);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("N-Bit filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeData() throws Exception {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dtype_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ try {
+ // Create a new file using the default properties.
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+
+ // Create the datatype to use with the N-Bit filter. It has an uncompressed size of 32 bits,
+ // but will have a size of 16 bits after being packed by the N-Bit filter.
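+ // With a precision of 16 bits at bit offset 5, the N-Bit filter stores only
+ // bits 5-20 of each 32-bit element.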
+ dtype_id = H5.H5Tcopy(HDF5Constants.H5T_STD_I32LE);
+ H5.H5Tset_precision(dtype_id, 16);
+ H5.H5Tset_offset(dtype_id, 5);
+
+ // Create the dataset creation property list, add the N-Bit filter and set the chunk size.
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ H5.H5Pset_nbit(dcpl_id);
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+
+ // Create the dataset.
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, dtype_id, filespace_id, HDF5Constants.H5P_DEFAULT, dcpl_id,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Write the data to the dataset.
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ // Close and release resources.
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ if (dtype_id >= 0)
+ H5.H5Tclose(dtype_id);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ }
+
+ private static void readData() throws Exception {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
+ filter_type = H5
+ .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ int status = H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ // Check if the read was successful.
+ if (status < 0)
+ System.out.print("Dataset read failed!");
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ /*
+ * Check if N-Bit compression is available and can be used for both compression and decompression. Normally we
+ * do not perform error checking in these examples for the sake of clarity, but in this case we will make an
+ * exception because this filter is an optional part of the HDF5 library.
+ */
+ try {
+ if (H5Ex_D_Nbit.checkNbitFilter()) {
+ H5Ex_D_Nbit.writeData();
+ H5Ex_D_Nbit.readData();
+ }
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ }
+}
diff --git a/java/examples/datasets/H5Ex_D_ReadWrite.java b/java/examples/datasets/H5Ex_D_ReadWrite.java
new file mode 100644
index 0000000..de94ccb
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_ReadWrite.java
@@ -0,0 +1,179 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+
+ This example shows how to read and write data to a
+ dataset. The program first writes integers to a dataset
+ with dataspace dimensions of DIM_XxDIM_Y, then closes the
+ file. Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_ReadWrite {
+ private static String FILENAME = "H5Ex_D_ReadWrite.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int RANK = 2;
+
+ private static void WriteDataset() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset. We will use all default properties for this example.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long dataset_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println(DATASETNAME + ":");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_ReadWrite.WriteDataset();
+ H5Ex_D_ReadWrite.ReadDataset();
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_Shuffle.java b/java/examples/datasets/H5Ex_D_Shuffle.java
new file mode 100644
index 0000000..ac3c1b4
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Shuffle.java
@@ -0,0 +1,373 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the shuffle filter with gzip compression. The
+ program first checks if the shuffle and gzip filters are
+ available, then, if they are, writes integers to a
+ dataset using shuffle+gzip, then closes the file. Next,
+ it reopens the file, reads back the data, and outputs the
+ types of filters and the maximum value in the dataset to
+ the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Shuffle {
+ private static String FILENAME = "H5Ex_D_Shuffle.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Values identifying the HDF5 filters
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkGzipFilter() {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (available == 0) {
+ System.out.println("gzip filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("gzip filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static boolean checkShuffleFilter() {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SHUFFLE);
+ if (available == 0) {
+ System.out.println("Shuffle filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SHUFFLE);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("Shuffle filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeShuffle() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the shuffle
+ // filter and the gzip compression filter.
+ // The order in which the filters are added here is significant -
+ // we get much better compression when the shuffle filter is applied
+ // first. The order in which the filters are added to the property
+ // list is the order in which they will be invoked when writing
+ // data.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_shuffle(dcpl_id);
+ H5.H5Pset_deflate(dcpl_id, 9);
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readShuffle() {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the number of filters, and retrieve and print the
+ // type of each.
+ try {
+ if (dcpl_id >= 0) {
+ int nfilters = H5.H5Pget_nfilters(dcpl_id);
+ for (int indx = 0; indx < nfilters; indx++) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
+ filter_type = H5.H5Pget_filter(dcpl_id, indx, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
+ System.out.print("Filter " + indx + ": Type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ // Check if gzip compression is available and can be used for both
+ // compression and decompression. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+ // optional part of the hdf5 library.
+ // Similarly, check for availability of the shuffle filter.
+ if (H5Ex_D_Shuffle.checkGzipFilter() && H5Ex_D_Shuffle.checkShuffleFilter()) {
+ H5Ex_D_Shuffle.writeShuffle();
+ H5Ex_D_Shuffle.readShuffle();
+ }
+ }
+
+}
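
The part of H5Ex_D_Shuffle.java that the rest of the file scaffolds is the dataset creation property list (DCPL): shuffle is registered before deflate because filters are invoked in the order they are added, and chunking is required for either filter to take effect. The condensed sketch below is not part of the patch; it reuses only calls that appear above, with an illustrative file name, level-9 deflate, and error handling reduced to a thrown exception instead of per-call try/catch blocks.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    // Sketch only: write one chunked dataset with shuffle + gzip (assumes both filters are available).
    public class ShuffleGzipSketch {
        public static void main(String[] args) throws Exception {
            long[] dims = { 32, 64 };
            long[] chunk_dims = { 4, 8 };
            int[][] dset_data = new int[32][64];

            long file_id = H5.H5Fcreate("shuffle_sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            long space_id = H5.H5Screate_simple(2, dims, null);

            // Filter order matters: shuffle first, then deflate.
            long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            H5.H5Pset_shuffle(dcpl_id);
            H5.H5Pset_deflate(dcpl_id, 9);
            H5.H5Pset_chunk(dcpl_id, 2, chunk_dims);

            long dataset_id = H5.H5Dcreate(file_id, "DS1", HDF5Constants.H5T_STD_I32LE, space_id,
                    HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
            H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                    HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);

            H5.H5Pclose(dcpl_id);
            H5.H5Dclose(dataset_id);
            H5.H5Sclose(space_id);
            H5.H5Fclose(file_id);
        }
    }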
diff --git a/java/examples/datasets/H5Ex_D_Sofloat.java b/java/examples/datasets/H5Ex_D_Sofloat.java
new file mode 100644
index 0000000..26c8d49
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Sofloat.java
@@ -0,0 +1,356 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the Scale-Offset filter. The program first checks
+ if the Scale-Offset filter is available; if it is, it
+ writes floating-point numbers to a dataset using
+ Scale-Offset and closes the file. Next, it reopens the
+ file, reads back the data, and outputs the type of filter
+ and the maximum value in the dataset to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Sofloat {
+
+ private static String FILENAME = "H5Ex_D_Sofloat.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // HDF5 filter identifiers
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE(
+ HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32(
+ HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT(
+ HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED(
+ HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkScaleoffsetFilter() {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+ if (available == 0) {
+ System.out.println("Scale-Offset filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("Scale-Offset filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeData() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ double[][] dset_data = new double[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ double x = indx;
+ double y = jndx;
+ dset_data[indx][jndx] = (x + 1) / (y + 0.3) + y;
+ }
+
+ // Find the maximum and minimum values in the write buffer, to display before writing.
+ double max = dset_data[0][0];
+ double min = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ if (min > dset_data[indx][jndx])
+ min = dset_data[indx][jndx];
+ }
+
+ // Print the maximum and minimum values.
+ System.out.println("Maximum value in write buffer is: " + max);
+ System.out.println("Minimum value in write buffer is: " + min);
+
+ // Create a new file using the default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the Scale-Offset
+ // filter and set the chunk size.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_FLOAT_DSCALE, 2);
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_IEEE_F64LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close and release resources.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close file
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readData() {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ double[][] dset_data = new double[DIM_X][DIM_Y];
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
+
+ filter_type = H5
+ .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum and minimum values in the dataset, to verify that it was read correctly.
+ double max = dset_data[0][0];
+ double min = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ if (min > dset_data[indx][jndx])
+ min = dset_data[indx][jndx];
+ }
+
+ // Print the maximum and minimum values.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+ System.out.println("Minimum value in " + DATASETNAME + " is: " + min);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+
+ // Check if Scale-Offset compression is available and can be used
+ // for both compression and decompression. Normally we do not
+ // perform error checking in these examples for the sake of
+ // clarity, but in this case we will make an exception because this
+ // filter is an optional part of the hdf5 library.
+ if (H5Ex_D_Sofloat.checkScaleoffsetFilter()) {
+ H5Ex_D_Sofloat.writeData();
+ H5Ex_D_Sofloat.readData();
+ }
+ }
+}
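
The only Scale-Offset-specific line in the example above is the H5Pset_scaleoffset call: H5Z_SO_FLOAT_DSCALE with a scale factor of 2 asks the filter to preserve two decimal digits, so storage is lossy beyond that precision and the values read back should agree with the write buffer only to roughly two decimal places. A minimal sketch of just that property-list setup, with the scale factor and chunk dimensions as illustrative parameters (the helper and class names are ours, not part of the library):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class SofloatDcplSketch {
        // Build a DCPL that stores floating-point data with the given number of
        // decimal digits preserved (lossy beyond that precision).
        static long scaleOffsetDcpl(long[] chunk_dims, int decimalDigits) throws Exception {
            long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_FLOAT_DSCALE, decimalDigits);
            H5.H5Pset_chunk(dcpl_id, chunk_dims.length, chunk_dims);
            return dcpl_id;
        }

        public static void main(String[] args) throws Exception {
            long dcpl_id = scaleOffsetDcpl(new long[] { 4, 8 }, 2);
            System.out.println("Scale-Offset DCPL created: " + dcpl_id);
            H5.H5Pclose(dcpl_id);
        }
    }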
diff --git a/java/examples/datasets/H5Ex_D_Soint.java b/java/examples/datasets/H5Ex_D_Soint.java
new file mode 100644
index 0000000..7939883
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Soint.java
@@ -0,0 +1,335 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the Scale-Offset filter. The program first checks
+ if the Scale-Offset filter is available; if it is, it
+ writes integers to a dataset using Scale-Offset, then
+ closes the file. Next, it reopens the file, reads back the
+ data, and outputs the type of filter and the maximum value
+ in the dataset to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Soint {
+
+ private static String FILENAME = "H5Ex_D_Soint.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // HDF5 filter identifiers
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE(
+ HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32(
+ HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT(
+ HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED(
+ HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkScaleoffsetFilter() {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+ if (available == 0) {
+ System.out.println("Scale-Offset filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("Scale-Offset filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeData() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using the default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the Scale-Offset
+ // filter and set the chunk size.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_INT, HDF5Constants.H5Z_SO_INT_MINBITS_DEFAULT);
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close and release resources.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close file
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readData() {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
+
+ filter_type = H5
+ .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+
+ // Check if Scale-Offset compression is available and can be used
+ // for both compression and decompression. Normally we do not
+ // perform error checking in these examples for the sake of
+ // clarity, but in this case we will make an exception because this
+ // filter is an optional part of the hdf5 library.
+ if (H5Ex_D_Soint.checkScaleoffsetFilter()) {
+ H5Ex_D_Soint.writeData();
+ H5Ex_D_Soint.readData();
+ }
+ }
+
+}
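
H5Ex_D_Soint differs from H5Ex_D_Sofloat only in the H5Pset_scaleoffset arguments: H5Z_SO_INT with H5Z_SO_INT_MINBITS_DEFAULT lets the library compute the minimum number of bits needed, which makes the packing lossless for integer data. Both examples also repeat the same two-step availability check; the sketch below condenses it into a single reusable helper (the method and class names are ours, not part of the library):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class FilterCheckSketch {
        // True only if the filter is present and can both encode and decode in this build.
        static boolean filterUsable(int filterId) throws Exception {
            if (H5.H5Zfilter_avail(filterId) == 0)
                return false;
            int filter_info = H5.H5Zget_filter_info(filterId);
            return (filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) != 0
                    && (filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) != 0;
        }

        public static void main(String[] args) throws Exception {
            System.out.println("Scale-Offset usable: "
                    + filterUsable(HDF5Constants.H5Z_FILTER_SCALEOFFSET));
        }
    }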
diff --git a/java/examples/datasets/H5Ex_D_Szip.java b/java/examples/datasets/H5Ex_D_Szip.java
new file mode 100644
index 0000000..5258234
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Szip.java
@@ -0,0 +1,337 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using szip compression. The program first checks if
+ szip compression is available; if it is, it writes
+ integers to a dataset using szip and closes the file.
+ Next, it reopens the file, reads back the data, and
+ outputs the type of compression and the maximum value in
+ the dataset to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Szip {
+ private static String FILENAME = "H5Ex_D_Szip.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // HDF5 filter identifiers
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR), H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE), H5Z_FILTER_DEFLATE(
+ HDF5Constants.H5Z_FILTER_DEFLATE), H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE), H5Z_FILTER_FLETCHER32(
+ HDF5Constants.H5Z_FILTER_FLETCHER32), H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP), H5Z_FILTER_NBIT(
+ HDF5Constants.H5Z_FILTER_NBIT), H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET), H5Z_FILTER_RESERVED(
+ HDF5Constants.H5Z_FILTER_RESERVED), H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkSzipFilter() {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SZIP);
+ if (available == 0) {
+ System.out.println("szip filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SZIP);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("szip filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeSzip() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the szip compression
+ // filter.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_szip(dcpl_id, HDF5Constants.H5_SZIP_NN_OPTION_MASK, 8);
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readSzip() {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
+
+ filter_type = H5
+ .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ // Check if szip compression is available and can be used for both
+ // compression and decompression. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+ // optional part of the hdf5 library.
+ if (H5Ex_D_Szip.checkSzipFilter()) {
+ H5Ex_D_Szip.writeSzip();
+ H5Ex_D_Szip.readSzip();
+ }
+ }
+
+}
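
For szip the filter-specific call is H5Pset_szip with H5_SZIP_NN_OPTION_MASK (nearest-neighbor coding) and 8 pixels per block, the same values used above. The availability check matters more here than for gzip: some HDF5 builds can decode szip but are not built with the encoder, in which case configuring the filter or the subsequent write will fail. A sketch of just the property-list setup, assuming a build with the szip encoder enabled:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class SzipDcplSketch {
        public static void main(String[] args) throws Exception {
            long[] chunk_dims = { 4, 8 };
            long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            // Nearest-neighbor option mask, 8 pixels per block, as in the example above.
            H5.H5Pset_szip(dcpl_id, HDF5Constants.H5_SZIP_NN_OPTION_MASK, 8);
            H5.H5Pset_chunk(dcpl_id, 2, chunk_dims);
            System.out.println("szip DCPL configured: " + dcpl_id);
            H5.H5Pclose(dcpl_id);
        }
    }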
diff --git a/java/examples/datasets/H5Ex_D_Transform.java b/java/examples/datasets/H5Ex_D_Transform.java
new file mode 100644
index 0000000..1f289f3
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_Transform.java
@@ -0,0 +1,250 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using a data transform expression. The program first
+ writes integers to a dataset using the transform
+ expression TRANSFORM, then closes the file. Next, it
+ reopens the file, reads back the data without a transform,
+ and outputs the data to the screen. Finally it reads the
+ data using the transform expression RTRANSFORM and outputs
+ the results to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Transform {
+
+ private static String FILE = "H5Ex_D_Transform.h5";
+ private static String DATASET = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static String TRANSFORM = "x+1";
+ private static String RTRANSFORM = "x-1";
+
+ private static void writeData() {
+ long file_id = -1;
+ long filespace_id = -1;
+ long dataset_id = -1;
+ long dxpl_id = -1;
+
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int i = 0; i < DIM_X; i++)
+ for (int j = 0; j < DIM_Y; j++)
+ dset_data[i][j] = i * j - j;
+
+ // Output the data to the screen.
+ System.out.println("Original Data:");
+ for (int i = 0; i < DIM_X; i++) {
+ System.out.print(" [");
+ for (int j = 0; j < DIM_Y; j++)
+ System.out.print(" " + dset_data[i][j] + " ");
+ System.out.println("]");
+ }
+
+ // Create a new file using the default properties.
+ try {
+ file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(2, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset transfer property list and define the transform expression.
+ try {
+ dxpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ if (dxpl_id >= 0)
+ H5.H5Pset_data_transform(dxpl_id, TRANSFORM);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset using the default properties. Unfortunately we must save as
+ // a native type or the transform operation will fail.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASET, HDF5Constants.H5T_NATIVE_INT, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset using the dataset transfer property list.
+ try {
+ if ((dataset_id >= 0) && (dxpl_id >= 0))
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ dxpl_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dxpl_id >= 0)
+ H5.H5Pclose(dxpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readData() {
+
+ long file_id = -1;
+ long dataset_id = -1;
+ long dxpl_id = -1;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASET, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as written with transform '" + TRANSFORM + "'");
+ for (int i = 0; i < DIM_X; i++) {
+ System.out.print(" [");
+ for (int j = 0; j < DIM_Y; j++)
+ System.out.print(" " + dset_data[i][j] + " ");
+ System.out.println("]");
+ }
+
+ // Create the dataset transfer property list and define the transform expression.
+ try {
+ dxpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ if (dxpl_id >= 0)
+ H5.H5Pset_data_transform(dxpl_id, RTRANSFORM);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the dataset transfer property list.
+ try {
+ if ((dataset_id >= 0) && (dxpl_id >= 0))
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ dxpl_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+
+ System.out.println("Data as written with transform '" + TRANSFORM + "' and read with transform '"
+ + RTRANSFORM + "'");
+ for (int i = 0; i < DIM_X; i++) {
+ System.out.print(" [");
+ for (int j = 0; j < DIM_Y; j++)
+ System.out.print(" " + dset_data[i][j] + " ");
+ System.out.println("]");
+ }
+
+ // Close and release resources.
+ try {
+ if (dxpl_id >= 0)
+ H5.H5Pclose(dxpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_Transform.writeData();
+ H5Ex_D_Transform.readData();
+ }
+
+}
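
The transform expression is attached to a dataset transfer property list and evaluated element by element during I/O, so writing with "x+1" and reading back with "x-1" recovers the original values without touching the stored data again. A condensed sketch of that round trip (illustrative file name, separate transfer lists for write and read, exceptions propagated instead of caught per call):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class TransformSketch {
        public static void main(String[] args) throws Exception {
            int[][] dset_data = new int[4][7];
            long file_id = H5.H5Fcreate("transform_sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            long space_id = H5.H5Screate_simple(2, new long[] { 4, 7 }, null);
            // A native type is used so the transform can be applied, as noted in the example.
            long dataset_id = H5.H5Dcreate(file_id, "DS1", HDF5Constants.H5T_NATIVE_INT, space_id,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);

            // Each element is written as x+1 ...
            long dxpl_write = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
            H5.H5Pset_data_transform(dxpl_write, "x+1");
            H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                    HDF5Constants.H5S_ALL, dxpl_write, dset_data);

            // ... and x-1 on the way back recovers the original values.
            long dxpl_read = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
            H5.H5Pset_data_transform(dxpl_read, "x-1");
            H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                    HDF5Constants.H5S_ALL, dxpl_read, dset_data);

            H5.H5Pclose(dxpl_write);
            H5.H5Pclose(dxpl_read);
            H5.H5Dclose(dataset_id);
            H5.H5Sclose(space_id);
            H5.H5Fclose(file_id);
        }
    }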
diff --git a/java/examples/datasets/H5Ex_D_UnlimitedAdd.java b/java/examples/datasets/H5Ex_D_UnlimitedAdd.java
new file mode 100644
index 0000000..ada8df0
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_UnlimitedAdd.java
@@ -0,0 +1,393 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create and extend an unlimited
+ dataset. The program first writes integers to a dataset
+ with dataspace dimensions of DIM_X x DIM_Y, then closes the
+ file. Next, it reopens the file, reads back the data,
+ outputs it to the screen, extends the dataset, and writes
+ new data to the extended portions of the dataset. Finally
+ it reopens the file again, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_UnlimitedAdd {
+ private static String FILENAME = "H5Ex_D_UnlimitedAdd.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ private static void writeUnlimited() {
+ long file_id = -1;
+ long dcpl_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace with unlimited dimensions.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the chunk size.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the unlimited dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void extendUnlimited() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] extdims = { EDIM_X, EDIM_Y };
+ long[] start = { 0, 0 };
+ long[] count = new long[2];
+ int[][] dset_data;
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the dataspace and query its current dimensions so the read
+ // buffer can be allocated to match the on-disk extent of the
+ // dataset.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate the read buffer using the retrieved dimensions.
+ dset_data = new int[(int) dims[0]][(int) dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset before extension:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Extend the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dset_extent(dataset_id, extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataspace for the newly extended dataset.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Initialize data for writing to the extended dataset.
+ for (int indx = 0; indx < EDIM_X; indx++)
+ for (int jndx = 0; jndx < EDIM_Y; jndx++)
+ extend_dset_data[indx][jndx] = jndx;
+
+ // Select the entire dataspace.
+ try {
+ if (dataspace_id >= 0) {
+ H5.H5Sselect_all(dataspace_id);
+
+ // Subtract a hyperslab reflecting the original dimensions from the
+ // selection. The selection now contains only the newly extended
+ // portions of the dataset.
+ count[0] = dims[0];
+ count[1] = dims[1];
+ H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_NOTB, start, null, count, null);
+
+ // Write the data to the selected portion of the dataset.
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readUnlimited() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for the read buffer as before.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Allocate the read buffer using the retrieved dimensions.
+ dset_data = new int[(int) dims[0]][(int) dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after extension:");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < dims[1]; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_UnlimitedAdd.writeUnlimited();
+ H5Ex_D_UnlimitedAdd.extendUnlimited();
+ H5Ex_D_UnlimitedAdd.readUnlimited();
+ }
+
+}
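
The step that sets this example apart is the extension logic in extendUnlimited(): H5Dset_extent grows a chunked dataset that was created with unlimited maxdims, a fresh dataspace is retrieved, and H5S_SELECT_NOTB subtracts the original extent so only the newly added elements are written. The sketch below condenses that sequence; as in the example, the memory space is H5S_ALL, so the write buffer is sized to the full new extent and only the selected elements are transferred. File and dataset names are illustrative.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class ExtendSketch {
        // Grow an open, chunked, unlimited dataset to newdims and write newdata only
        // into the region added beyond olddims (subtracted with H5S_SELECT_NOTB).
        static void extendAndFill(long dataset_id, long[] olddims, long[] newdims, int[][] newdata)
                throws Exception {
            H5.H5Dset_extent(dataset_id, newdims);
            long space_id = H5.H5Dget_space(dataset_id); // dataspace now has the new extent
            H5.H5Sselect_all(space_id);
            H5.H5Sselect_hyperslab(space_id, HDF5Constants.H5S_SELECT_NOTB,
                    new long[] { 0, 0 }, null, olddims, null);
            H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, space_id,
                    HDF5Constants.H5P_DEFAULT, newdata);
            H5.H5Sclose(space_id);
        }

        public static void main(String[] args) throws Exception {
            long[] dims = { 4, 7 };
            long[] extdims = { 6, 10 };
            long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };

            long file_id = H5.H5Fcreate("extend_sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            long space_id = H5.H5Screate_simple(2, dims, maxdims);
            long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            H5.H5Pset_chunk(dcpl_id, 2, new long[] { 4, 4 });
            long dataset_id = H5.H5Dcreate(file_id, "DS1", HDF5Constants.H5T_STD_I32LE, space_id,
                    HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
            H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                    HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, new int[4][7]);

            extendAndFill(dataset_id, dims, extdims, new int[6][10]);

            H5.H5Pclose(dcpl_id);
            H5.H5Dclose(dataset_id);
            H5.H5Sclose(space_id);
            H5.H5Fclose(file_id);
        }
    }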
diff --git a/java/examples/datasets/H5Ex_D_UnlimitedGzip.java b/java/examples/datasets/H5Ex_D_UnlimitedGzip.java
new file mode 100644
index 0000000..c08ceef
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_UnlimitedGzip.java
@@ -0,0 +1,504 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create and extend an unlimited
+ dataset with gzip compression. The program first writes
+ integers to a gzip compressed dataset with dataspace
+ dimensions of DIM_X x DIM_Y, then closes the file. Next, it
+ reopens the file, reads back the data, outputs it to the
+ screen, extends the dataset, and writes new data to the
+ extended portions of the dataset. Finally it reopens the
+ file again, reads back the data, and outputs it to the
+ screen.
+ ************************************************************/
+package examples.datasets;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_UnlimitedGzip {
+ private static String FILENAME = "H5Ex_D_UnlimitedGzip.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Identifiers for the filters that H5Pget_filter can report
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
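+ // Reverse lookup table mapping the raw integer filter codes returned by the library back to enum values.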
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5Z_filter get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static boolean checkGzipFilter() {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (available == 0) {
+ System.out.println("gzip filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
+ || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("gzip filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeUnlimited() {
+ long file_id = -1;
+ long dcpl_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace with unlimited dimensions.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the gzip compression
+ // filter.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_deflate(dcpl_id, 9);
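+ // Level 9 requests the maximum gzip (deflate) compression effort; valid levels range from 0 to 9.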
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the unlimited dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void extendUnlimited() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] extdims = { EDIM_X, EDIM_Y };
+ long[] start = { 0, 0 };
+ long[] count = new long[2];
+ int[][] dset_data;
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer. This is a
+ // two dimensional dataset so the dynamic allocation must be done
+ // in steps.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate array of pointers to rows.
+ dset_data = new int[(int) dims[0]][(int) dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset before extension:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Extend the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dset_extent(dataset_id, extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataspace for the newly extended dataset.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Initialize data for writing to the extended dataset.
+ for (int indx = 0; indx < EDIM_X; indx++)
+ for (int jndx = 0; jndx < EDIM_Y; jndx++)
+ extend_dset_data[indx][jndx] = jndx;
+
+ // Select the entire dataspace.
+ try {
+ if (dataspace_id >= 0) {
+ H5.H5Sselect_all(dataspace_id);
+
+ // Subtract a hyperslab reflecting the original dimensions from the
+ // selection. The selection now contains only the newly extended
+ // portions of the dataset.
+ count[0] = dims[0];
+ count[1] = dims[1];
+ H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_NOTB, start, null, count, null);
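+ // Here, with original dims {4, 7} and extdims {6, 10}, only the elements outside the original 4 x 7 block remain selected.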
+
+ // Write the data to the selected portion of the dataset.
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readUnlimited() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long dcpl_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = { 0 };
+ long[] cd_nelmts = { 1 };
+ int[] cd_values = { 0 };
+ String[] filter_name = { "" };
+ int[] filter_config = { 0 };
+ int filter_type = -1;
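+ // Retrieve the first filter (index 0) in the pipeline; the value 120 is the maximum length accepted for the returned filter name.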
+ filter_type = H5
+ .H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name, filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for the read buffer as before.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Allocate array of pointers to rows.
+ dset_data = new int[(int) dims[0]][(int) dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after extension:");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < dims[1]; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ // Check if gzip compression is available and can be used for both
+ // compression and decompression. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+ // optional part of the hdf5 library.
+ if (H5Ex_D_UnlimitedGzip.checkGzipFilter()) {
+ H5Ex_D_UnlimitedGzip.writeUnlimited();
+ H5Ex_D_UnlimitedGzip.extendUnlimited();
+ H5Ex_D_UnlimitedGzip.readUnlimited();
+ }
+ }
+
+}
diff --git a/java/examples/datasets/H5Ex_D_UnlimitedMod.java b/java/examples/datasets/H5Ex_D_UnlimitedMod.java
new file mode 100644
index 0000000..884cad3
--- /dev/null
+++ b/java/examples/datasets/H5Ex_D_UnlimitedMod.java
@@ -0,0 +1,379 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create and extend an unlimited
+ dataset. The program first writes integers to a dataset
+ with dataspace dimensions of DIM_XxDIM_Y, then closes the
+ file. Next, it reopens the file, reads back the data,
+ outputs it to the screen, extends the dataset, and writes
+ new data to the entire extended dataset. Finally it
+ reopens the file again, reads back the data, and outputs it
+ to the screen.
+ ************************************************************/
+package examples.datasets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_UnlimitedMod {
+ private static String FILENAME = "H5Ex_D_UnlimitedMod.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ private static void writeUnlimited() {
+ long file_id = -1;
+ long dcpl_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+ long[] maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace with unlimited dimensions.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the chunk size.
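+ // Chunked storage is required for any dataset that uses unlimited (extendible) dimensions.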
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the unlimited dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void extendUnlimited() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ long[] extdims = { EDIM_X, EDIM_Y };
+ int[][] dset_data;
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer. This is a
+ // two dimensional dataset so the dynamic allocation must be done
+ // in steps.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate array of pointers to rows.
+ dset_data = new int[(int) dims[0]][(int) dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset before extension:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Extend the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dset_extent(dataset_id, extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataspace for the newly extended dataset.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Initialize data for writing to the extended dataset.
+ for (int indx = 0; indx < EDIM_X; indx++)
+ for (int jndx = 0; jndx < EDIM_Y; jndx++)
+ extend_dset_data[indx][jndx] = jndx;
+
+ // Write the data to the extended dataset.
+ try {
+ if ((dataspace_id >= 0) && (dataset_id >= 0))
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readUnlimited() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for the read buffer as before.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Allocate array of pointers to rows.
+ dset_data = new int[(int) dims[0]][(int) dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after extension:");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < dims[1]; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_D_UnlimitedMod.writeUnlimited();
+ H5Ex_D_UnlimitedMod.extendUnlimited();
+ H5Ex_D_UnlimitedMod.readUnlimited();
+ }
+
+}
diff --git a/java/examples/datasets/Makefile.am b/java/examples/datasets/Makefile.am
new file mode 100644
index 0000000..41ba6d1
--- /dev/null
+++ b/java/examples/datasets/Makefile.am
@@ -0,0 +1,78 @@
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+# HDF5 Java Library Examples Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+JAVAROOT = .classes
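+# Compiled example classes are collected under $(JAVAROOT) and then packed into the example jar defined below.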
+
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+pkgpath = examples/datasets
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.5.jar:$$CLASSPATH
+
+jarfile = jar$(PACKAGE_TARNAME)datasets.jar
+
+AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
+
+TESTPACKAGE =
+
+noinst_JAVA = \
+ H5Ex_D_Alloc.java \
+ H5Ex_D_Checksum.java \
+ H5Ex_D_Chunk.java \
+ H5Ex_D_Compact.java \
+ H5Ex_D_External.java \
+ H5Ex_D_FillValue.java \
+ H5Ex_D_Gzip.java \
+ H5Ex_D_Hyperslab.java \
+ H5Ex_D_ReadWrite.java \
+ H5Ex_D_Shuffle.java \
+ H5Ex_D_Szip.java \
+ H5Ex_D_UnlimitedAdd.java \
+ H5Ex_D_UnlimitedGzip.java \
+ H5Ex_D_UnlimitedMod.java \
+ H5Ex_D_Nbit.java \
+ H5Ex_D_Transform.java \
+ H5Ex_D_Sofloat.java \
+ H5Ex_D_Soint.java
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+noinst_DATA = $(jarfile)
+
+.PHONY: classes
+
+check_SCRIPTS = runExample.sh
+TEST_SCRIPT = $(check_SCRIPTS)
+
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class runExample.sh
+
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
+
+include $(top_srcdir)/config/conclude.am
diff --git a/java/examples/datasets/runExample.sh.in b/java/examples/datasets/runExample.sh.in
new file mode 100644
index 0000000..83b06ec
--- /dev/null
+++ b/java/examples/datasets/runExample.sh.in
@@ -0,0 +1,405 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+
+top_builddir=@top_builddir@
+top_srcdir=@top_srcdir@
+srcdir=@srcdir@
+
+USE_FILTER_SZIP="@USE_FILTER_SZIP@"
+USE_FILTER_DEFLATE="@USE_FILTER_DEFLATE@"
+
+TESTNAME=EX_Datasets
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+
+# Set up default variable values if not supplied by the user.
+RM='rm -rf'
+CMP='cmp'
+DIFF='diff -c'
+CP='cp'
+DIRNAME='dirname'
+LS='ls'
+AWK='awk'
+
+nerrors=0
+
+# where the libs exist
+HDFLIB_HOME="$top_srcdir/java/lib"
+BLDLIBDIR="./lib"
+BLDDIR="."
+HDFTEST_HOME="$top_srcdir/java/examples/datasets"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@datasets.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# All library files are copied from the source directory to the test directory.
+# NOTE: Keep this framework to add/remove test files.
+#       The list is also used to check that the files exist.
+#       Lines starting with '#' are treated as comments and skipped.
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$HDFLIB_HOME/slf4j-api-1.7.5.jar
+$HDFLIB_HOME/ext/slf4j-simple-1.7.5.jar
+$top_builddir/src/.libs/libhdf5.*
+$top_builddir/java/src/jni/.libs/libhdf5_java.*
+$top_builddir/java/src/$JARFILE
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Alloc.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Checksum.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Chunk.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Compact.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_External.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_FillValue.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Gzip.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Hyperslab.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_ReadWrite.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Shuffle.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Szip.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Nbit.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Transform.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Sofloat.txt
+$HDFTEST_HOME/../testfiles/examples.datasets.H5Ex_D_Soint.txt
+"
+
+#
+# copy files from source dirs to test dir
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+ # copy test files; -f is used to make sure we get a fresh copy
+ for tstfile in $COPY_LIBFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # skip cp if srcdir is the same as destdir;
+ # this occurs when the build/test is performed in the source dir
+ # and would make cp fail
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out this to CREATE expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+ # skip rm if srcdir is the same as destdir;
+ # this occurs when the build/test is performed in the source dir
+ # and rm would remove the original library files
+ SDIR=`$DIRNAME $HDFLIB_HOME/slf4j-api-1.7.5.jar`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDLIBDIR
+ fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+ # copy test files; -f is used to make sure we get a fresh copy
+ for tstfile in $COPY_DATAFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # skip cp if srcdir is the same as destdir;
+ # this occurs when the build/test is performed in the source dir
+ # and would make cp fail
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out this to CREATE expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+ $RM $BLDDIR/examples.datasets.H5Ex_D_*.txt
+ $RM $BLDDIR/H5Ex_D_*.out
+}
+
+# Print a one-line message left justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+JAVAEXEFLAGS=@H5_JAVAFLAGS@
+
+###############################################################################
+# DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-1.7.5.jar:"$BLDLIBDIR"/slf4j-simple-1.7.5.jar:"$TESTJARFILE""
+
+TEST=/usr/bin/test
+if [ ! -x /usr/bin/test ]
+then
+TEST=`which test`
+fi
+
+if $TEST -z "$CLASSPATH"; then
+ CLASSPATH=""
+fi
+CLASSPATH=$CPATH":"$CLASSPATH
+export CLASSPATH
+
+if $TEST -n "$JAVAPATH" ; then
+ PATH=$JAVAPATH":"$PATH
+ export PATH
+fi
+
+if $TEST -e /bin/uname; then
+ os_name=`/bin/uname -s`
+elif $TEST -e /usr/bin/uname; then
+ os_name=`/usr/bin/uname -s`
+else
+ os_name=unknown
+fi
+
+if $TEST -z "$LD_LIBRARY_PATH" ; then
+ LD_LIBRARY_PATH=""
+fi
+
+case $os_name in
+ Darwin)
+ DYLD_LIBRARY_PATH=$BLDLIBDIR:$DYLD_LIBRARY_PATH
+ export DYLD_LIBRARY_PATH
+ LD_LIBRARY_PATH=$DYLD_LIBRARY_PATH
+ ;;
+ *)
+ LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
+ ;;
+esac
+
+export LD_LIBRARY_PATH
+
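+# Each example below is run with its output redirected to <Example>.out, which is then diffed against the expected output in examples.datasets.<Example>.txt.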
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Alloc"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Alloc > H5Ex_D_Alloc.out)
+if diff H5Ex_D_Alloc.out examples.datasets.H5Ex_D_Alloc.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Alloc"
+else
+ echo "**FAILED** datasets.H5Ex_D_Alloc"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Checksum"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Checksum > H5Ex_D_Checksum.out)
+if diff H5Ex_D_Checksum.out examples.datasets.H5Ex_D_Checksum.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Checksum"
+else
+ echo "**FAILED** datasets.H5Ex_D_Checksum"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Chunk"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Chunk > H5Ex_D_Chunk.out)
+if diff H5Ex_D_Chunk.out examples.datasets.H5Ex_D_Chunk.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Chunk"
+else
+ echo "**FAILED** datasets.H5Ex_D_Chunk"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Compact"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Compact > H5Ex_D_Compact.out)
+if diff H5Ex_D_Compact.out examples.datasets.H5Ex_D_Compact.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Compact"
+else
+ echo "**FAILED** datasets.H5Ex_D_Compact"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_External"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_External > H5Ex_D_External.out)
+if diff H5Ex_D_External.out examples.datasets.H5Ex_D_External.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_External"
+else
+ echo "**FAILED** datasets.H5Ex_D_External"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_FillValue"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_FillValue > H5Ex_D_FillValue.out)
+if diff H5Ex_D_FillValue.out examples.datasets.H5Ex_D_FillValue.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_FillValue"
+else
+ echo "**FAILED** datasets.H5Ex_D_FillValue"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Gzip"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Gzip > H5Ex_D_Gzip.out)
+if diff H5Ex_D_Gzip.out examples.datasets.H5Ex_D_Gzip.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Gzip"
+else
+ echo "**FAILED** datasets.H5Ex_D_Gzip"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Hyperslab"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Hyperslab > H5Ex_D_Hyperslab.out)
+if diff H5Ex_D_Hyperslab.out examples.datasets.H5Ex_D_Hyperslab.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Hyperslab"
+else
+ echo "**FAILED** datasets.H5Ex_D_Hyperslab"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_ReadWrite"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_ReadWrite > H5Ex_D_ReadWrite.out)
+if diff H5Ex_D_ReadWrite.out examples.datasets.H5Ex_D_ReadWrite.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_ReadWrite"
+else
+ echo "**FAILED** datasets.H5Ex_D_ReadWrite"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Shuffle"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Shuffle > H5Ex_D_Shuffle.out)
+if diff H5Ex_D_Shuffle.out examples.datasets.H5Ex_D_Shuffle.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Shuffle"
+else
+ echo "**FAILED** datasets.H5Ex_D_Shuffle"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+if test $USE_FILTER_SZIP = "yes"; then
+ echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Szip"
+ ($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Szip > H5Ex_D_Szip.out)
+ if diff H5Ex_D_Szip.out examples.datasets.H5Ex_D_Szip.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Szip"
+ else
+ echo "**FAILED** datasets.H5Ex_D_Szip"
+ nerrors="`expr $nerrors + 1`"
+ fi
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedAdd"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedAdd > H5Ex_D_UnlimitedAdd.out)
+if diff H5Ex_D_UnlimitedAdd.out examples.datasets.H5Ex_D_UnlimitedAdd.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_UnlimitedAdd"
+else
+ echo "**FAILED** datasets.H5Ex_D_UnlimitedAdd"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedGzip"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedGzip > H5Ex_D_UnlimitedGzip.out)
+if diff H5Ex_D_UnlimitedGzip.out examples.datasets.H5Ex_D_UnlimitedGzip.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_UnlimitedGzip"
+else
+ echo "**FAILED** datasets.H5Ex_D_UnlimitedGzip"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedMod"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedMod > H5Ex_D_UnlimitedMod.out)
+if diff H5Ex_D_UnlimitedMod.out examples.datasets.H5Ex_D_UnlimitedMod.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_UnlimitedMod"
+else
+ echo "**FAILED** datasets.H5Ex_D_UnlimitedMod"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Nbit"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Nbit > H5Ex_D_Nbit.out)
+if diff H5Ex_D_Nbit.out examples.datasets.H5Ex_D_Nbit.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Nbit"
+else
+ echo "**FAILED** datasets.H5Ex_D_Nbit"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Transform"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Transform > H5Ex_D_Transform.out)
+if diff H5Ex_D_Transform.out examples.datasets.H5Ex_D_Transform.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Transform"
+else
+ echo "**FAILED** datasets.H5Ex_D_Transform"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Sofloat"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Sofloat > H5Ex_D_Sofloat.out)
+if diff H5Ex_D_Sofloat.out examples.datasets.H5Ex_D_Sofloat.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Sofloat"
+else
+ echo "**FAILED** datasets.H5Ex_D_Sofloat"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Soint"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Soint > H5Ex_D_Soint.out)
+if diff H5Ex_D_Soint.out examples.datasets.H5Ex_D_Soint.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Soint"
+else
+ echo "**FAILED** datasets.H5Ex_D_Soint"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+# Clean up temporary files/directories
+CLEAN_LIBFILES_AND_BLDLIBDIR
+CLEAN_DATAFILES_AND_BLDDIR
+
+# Report test results and exit
+if test $nerrors -eq 0 ; then
+ echo "All $TESTNAME tests passed."
+ exit $EXIT_SUCCESS
+else
+ echo "$TESTNAME tests failed with $nerrors errors."
+ exit $EXIT_FAILURE
+fi
diff --git a/java/examples/datatypes/CMakeLists.txt b/java/examples/datatypes/CMakeLists.txt
new file mode 100644
index 0000000..6525506
--- /dev/null
+++ b/java/examples/datatypes/CMakeLists.txt
@@ -0,0 +1,109 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDFJAVA_EXAMPLES_DATATYPES Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDF5_JAVA_JNI_BINARY_DIR}
+ ${HDF5_JAVA_HDF5_LIB_DIR}
+)
+
+set (HDF_JAVA_EXAMPLES
+ H5Ex_T_Array
+ H5Ex_T_ArrayAttribute
+ H5Ex_T_Bit
+ H5Ex_T_BitAttribute
+ H5Ex_T_Commit
+ H5Ex_T_Compound
+ H5Ex_T_CompoundAttribute
+ H5Ex_T_Float
+ H5Ex_T_FloatAttribute
+ H5Ex_T_Integer
+ H5Ex_T_IntegerAttribute
+ H5Ex_T_ObjectReference
+ H5Ex_T_ObjectReferenceAttribute
+ H5Ex_T_Opaque
+ H5Ex_T_OpaqueAttribute
+ H5Ex_T_String
+ H5Ex_T_StringAttribute
+ H5Ex_T_VLString
+)
+
+if (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+else (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+endif (WIN32)
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS}")
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
+endforeach (CMAKE_INCLUDE_PATH)
+
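+# Each example is compiled into its own jar with a manifest that names its main class, and is made to depend on the HDF5 Java library target.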
+foreach (example ${HDF_JAVA_EXAMPLES})
+ file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt
+ "Main-Class: examples.datatypes.${example}
+"
+ )
+ add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java)
+ get_target_property (${example}_JAR_FILE ${example} JAR_FILE)
+# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
+ get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
+ add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
+endforeach (example ${HDF_JAVA_EXAMPLES})
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (HDFJAVA_JAR ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${HDFJAVA_JAR}")
+endforeach (HDFJAVA_JAR)
+
+MACRO (ADD_H5_TEST resultfile resultcode)
+ add_test (
+ NAME JAVA_datatypes-${resultfile}
+ COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
+ -D "TEST_PROGRAM=examples.datatypes.${resultfile}"
+ -D "TEST_ARGS:STRING=${ARGN}"
+ -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${resultfile}_JAR_FILE}"
+ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}"
+ -D "TEST_FOLDER=${HDFJAVA_EXAMPLES_BINARY_DIR}"
+ -D "TEST_OUTPUT=datatypes/${resultfile}.out"
+ -D "TEST_EXPECT=${resultcode}"
+ -D "TEST_REFERENCE=datatypes/${resultfile}.txt"
+ -P "${HDF_RESOURCES_DIR}/jrunTest.cmake"
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (JAVA_datatypes-${resultfile} PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "JAVA_datatypes-${resultfile}")
+ENDMACRO (ADD_H5_TEST file)
+
+if (BUILD_TESTING)
+ foreach (example ${HDF_JAVA_EXAMPLES})
+ add_test (
+ NAME JAVA_datatypes-${example}-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
+ ${example}.out
+ ${example}.out.err
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (JAVA_datatypes-${example}-clearall-objects PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ add_test (
+ NAME JAVA_datatypes-${example}-copy-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E copy_if_different
+ ${HDFJAVA_EXAMPLES_SOURCE_DIR}/testfiles/examples.datatypes.${example}.txt
+ ${HDFJAVA_EXAMPLES_DATATYPES_BINARY_DIR}/${example}.txt
+ )
+ set_tests_properties (JAVA_datatypes-${example}-copy-objects PROPERTIES DEPENDS JAVA_datatypes-${example}-clearall-objects)
+ set (last_test "JAVA_datatypes-${example}-copy-objects")
+ ADD_H5_TEST (${example} 0)
+ endforeach (example ${HDF_JAVA_EXAMPLES})
+endif (BUILD_TESTING)
diff --git a/java/examples/datatypes/H5Ex_T_Array.java b/java/examples/datatypes/H5Ex_T_Array.java
new file mode 100644
index 0000000..7b7009a
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_Array.java
@@ -0,0 +1,282 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write array datatypes
+ to a dataset. The program first writes integer arrays of
+ dimension ADIM0xADIM1 to a dataset with a dataspace of
+ DIM0, then closes the file. Next, it reopens the file,
+ reads back the data, and outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_Array {
+ private static String FILENAME = "H5Ex_T_Array.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int ADIM0 = 3;
+ private static final int ADIM1 = 5;
+ private static final int RANK = 1;
+ private static final int NDIMS = 2;
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long filetype_id = -1;
+ long memtype_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM0 };
+ long[] adims = { ADIM0, ADIM1 };
+ int[][][] dset_data = new int[DIM0][ADIM0][ADIM1];
+
+ // Initialize data. indx is the element in the dataspace, jndx and kndx the
+ // elements within the array datatype.
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < ADIM0; jndx++)
+ for (int kndx = 0; kndx < ADIM1; kndx++)
+ dset_data[indx][jndx][kndx] = indx * jndx - jndx * kndx + indx * kndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create array datatypes for file.
+ try {
+ filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, NDIMS, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
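+ // The file datatype stores the array elements as 64-bit little-endian integers, while the memory datatype created below uses native ints; HDF5 converts between the two representations during I/O.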
+ // Create array datatypes for memory.
+ try {
+ memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, NDIMS, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the dataset.
+ try {
+ if ((dataset_id >= 0) && (memtype_id >= 0))
+ H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long filetype_id = -1;
+ long memtype_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM0 };
+ long[] adims = { ADIM0, ADIM1 };
+ int[][][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the datatype.
+ try {
+ if (dataset_id >= 0)
+ filetype_id = H5.H5Dget_type(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the datatype's dimensions.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tget_array_dims(filetype_id, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate array of pointers to two-dimensional arrays (the
+ // elements of the dataset).
+ dset_data = new int[(int) dims[0]][(int) (adims[0])][(int) (adims[1])];
+
+ // Create array datatypes for memory.
+ try {
+ memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, 2, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read data.
+ try {
+ if ((dataset_id >= 0) && (memtype_id >= 0))
+ H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(DATASETNAME + " [" + indx + "]:");
+ for (int jndx = 0; jndx < adims[0]; jndx++) {
+ System.out.print(" [");
+ for (int kndx = 0; kndx < adims[1]; kndx++)
+ System.out.print(dset_data[indx][jndx][kndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_Array.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the dataset and array have the same name and rank, but can have
+ // any size. Therefore we must allocate a new array to read in
+ // the data.
+ H5Ex_T_Array.ReadDataset();
+ }
+
+}
diff --git a/java/examples/datatypes/H5Ex_T_ArrayAttribute.java b/java/examples/datatypes/H5Ex_T_ArrayAttribute.java
new file mode 100644
index 0000000..ce97457
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_ArrayAttribute.java
@@ -0,0 +1,322 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write array datatypes
+ to an attribute. The program first writes integer arrays
+ of dimension ADIM0xADIM1 to an attribute with a dataspace
+ of DIM0, then closes the file. Next, it reopens the
+ file, reads back the data, and outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_ArrayAttribute {
+ private static String FILENAME = "H5Ex_T_ArrayAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int ADIM0 = 3;
+ private static final int ADIM1 = 5;
+ private static final int RANK = 1;
+ private static final int NDIMS = 2;
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long filetype_id = -1;
+ long memtype_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long[] dims = { DIM0 };
+ long[] adims = { ADIM0, ADIM1 };
+ int[][][] dset_data = new int[DIM0][ADIM0][ADIM1];
+
+ // Initialize data. indx is the element in the dataspace, jndx and kndx the
+ // elements within the array datatype.
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < ADIM0; jndx++)
+ for (int kndx = 0; kndx < ADIM1; kndx++)
+ dset_data[indx][jndx][kndx] = indx * jndx - jndx * kndx + indx * kndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create array datatypes for file.
+ try {
+ filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, NDIMS, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create array datatypes for memory.
+ try {
+ memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, NDIMS, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = -1;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute and write the array data to it.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the array data to the attribute.
+ try {
+ if ((attribute_id >= 0) && (memtype_id >= 0))
+ H5.H5Awrite(attribute_id, memtype_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long filetype_id = -1;
+ long memtype_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long[] dims = { DIM0 };
+ long[] adims = { ADIM0, ADIM1 };
+ int[][][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
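+ // Open the attribute.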
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the datatype.
+ try {
+ if (attribute_id >= 0)
+ filetype_id = H5.H5Aget_type(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the datatype's dimensions.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tget_array_dims(filetype_id, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a three-dimensional array to hold the elements of the
+ // attribute (the read buffer).
+ dset_data = new int[(int) dims[0]][(int) (adims[0])][(int) (adims[1])];
+
+ // Create array datatypes for memory.
+ try {
+ memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, 2, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read data.
+ try {
+ if ((attribute_id >= 0) && (memtype_id >= 0))
+ H5.H5Aread(attribute_id, memtype_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(ATTRIBUTENAME + " [" + indx + "]:");
+ for (int jndx = 0; jndx < adims[0]; jndx++) {
+ System.out.print(" [");
+ for (int kndx = 0; kndx < adims[1]; kndx++)
+ System.out.print(dset_data[indx][jndx][kndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_ArrayAttribute.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the attribute has the same name and rank, but can have any size.
+ // Therefore we allocate a new array for the read buffer, sized from
+ // the dimensions read back from the file.
+ H5Ex_T_ArrayAttribute.ReadDataset();
+ }
+
+}
diff --git a/java/examples/datatypes/H5Ex_T_Bit.java b/java/examples/datatypes/H5Ex_T_Bit.java
new file mode 100644
index 0000000..f76c7d5
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_Bit.java
@@ -0,0 +1,227 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write bitfield
+ datatypes to a dataset. The program first writes bit
+ fields to a dataset with a dataspace of DIM0xDIM1, then
+ closes the file. Next, it reopens the file, reads back
+ the data, and outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_Bit {
+ private static String FILENAME = "H5Ex_T_Bit.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM0, DIM1 };
+ int[][] dset_data = new int[DIM0][DIM1];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < DIM1; jndx++) {
+ dset_data[indx][jndx] = 0;
+ dset_data[indx][jndx] |= (indx * jndx - jndx) & 0x03; /* Field "A" */
+ dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */
+ dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */
+ dset_data[indx][jndx] |= ((indx + jndx) & 0x03) << 6; /* Field "D" */
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_B8BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the bitfield data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM0, DIM1 };
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional array to hold the elements of the
+ // dataset (the read buffer).
+ dset_data = new int[(int) dims[0]][(int) (dims[1])];
+
+ // Read data.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println(DATASETNAME + ":");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [");
+ for (int jndx = 0; jndx < dims[1]; jndx++) {
+ System.out.print("{" + (dset_data[indx][jndx] & 0x03) + ", ");
+ System.out.print(((dset_data[indx][jndx] >> 2) & 0x03) + ", ");
+ System.out.print(((dset_data[indx][jndx] >> 4) & 0x03) + ", ");
+ System.out.print(((dset_data[indx][jndx] >> 6) & 0x03) + "}");
+ }
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_Bit.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the dataset has the same name and rank, but can have any size.
+ // Therefore we allocate a new array for the read buffer, sized from
+ // the dimensions read back from the file.
+ H5Ex_T_Bit.ReadDataset();
+ }
+
+}
diff --git a/java/examples/datatypes/H5Ex_T_BitAttribute.java b/java/examples/datatypes/H5Ex_T_BitAttribute.java
new file mode 100644
index 0000000..a5ab81b
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_BitAttribute.java
@@ -0,0 +1,267 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write bitfield
+ datatypes to an attribute. The program first writes bit
+ fields to an attribute with a dataspace of DIM0xDIM1, then
+ closes the file. Next, it reopens the file, reads back
+ the data, and outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_BitAttribute {
+ private static String FILENAME = "H5Ex_T_BitAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long[] dims = { DIM0, DIM1 };
+ int[][] dset_data = new int[DIM0][DIM1];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < DIM1; jndx++) {
+ dset_data[indx][jndx] = 0;
+ dset_data[indx][jndx] |= (indx * jndx - jndx) & 0x03; /* Field "A" */
+ dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */
+ dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */
+ dset_data[indx][jndx] |= ((indx + jndx) & 0x03) << 6; /* Field "D" */
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = -1;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute and write the array data to it.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_B8BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the bitfield data to the attribute.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_B8, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long[] dims = { DIM0, DIM1 };
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
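+ // Open the attribute.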
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional array to hold the elements of the
+ // attribute (the read buffer).
+ dset_data = new int[(int) dims[0]][(int) (dims[1])];
+
+ // Read data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_B8, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println(ATTRIBUTENAME + ":");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [");
+ for (int jndx = 0; jndx < dims[1]; jndx++) {
+ System.out.print("{" + (dset_data[indx][jndx] & 0x03) + ", ");
+ System.out.print(((dset_data[indx][jndx] >> 2) & 0x03) + ", ");
+ System.out.print(((dset_data[indx][jndx] >> 4) & 0x03) + ", ");
+ System.out.print(((dset_data[indx][jndx] >> 6) & 0x03) + "}");
+ }
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_BitAttribute.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the attribute has the same name and rank, but can have any size.
+ // Therefore we allocate a new array for the read buffer, sized from
+ // the dimensions read back from the file.
+ H5Ex_T_BitAttribute.ReadDataset();
+ }
+
+}
diff --git a/java/examples/datatypes/H5Ex_T_Commit.java b/java/examples/datatypes/H5Ex_T_Commit.java
new file mode 100644
index 0000000..d4e43f9
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_Commit.java
@@ -0,0 +1,265 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to commit a named datatype to a
+ file, and read back that datatype. The program first
+ defines a compound datatype, commits it to a file, then
+ closes the file. Next, it reopens the file, opens the
+ datatype, and outputs the names of its fields to the
+ screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+public class H5Ex_T_Commit {
+ private static String FILENAME = "H5Ex_T_Commit.h5";
+ private static String DATATYPENAME = "Sensor_Type";
+ protected static final int INTEGERSIZE = 4;
+ protected static final int DOUBLESIZE = 8;
+ protected final static int MAXSTRINGSIZE = 80;
+
+ // Values for the various classes of datatypes
+ enum H5T_class {
+ H5T_NO_CLASS(HDF5Constants.H5T_NO_CLASS), // error
+ H5T_INTEGER(HDF5Constants.H5T_INTEGER), // integer types
+ H5T_FLOAT(HDF5Constants.H5T_FLOAT), // floating-point types
+ H5T_TIME(HDF5Constants.H5T_TIME), // date and time types
+ H5T_STRING(HDF5Constants.H5T_STRING), // character string types
+ H5T_BITFIELD(HDF5Constants.H5T_BITFIELD), // bit field types
+ H5T_OPAQUE(HDF5Constants.H5T_OPAQUE), // opaque types
+ H5T_COMPOUND(HDF5Constants.H5T_COMPOUND), // compound types
+ H5T_REFERENCE(HDF5Constants.H5T_REFERENCE), // reference types
+ H5T_ENUM(HDF5Constants.H5T_ENUM), // enumeration types
+ H5T_VLEN(HDF5Constants.H5T_VLEN), // Variable-Length types
+ H5T_ARRAY(HDF5Constants.H5T_ARRAY), // Array types
+ H5T_NCLASSES(11); // this must be last
+
+ private static final Map<Long, H5T_class> lookup = new HashMap<Long, H5T_class>();
+
+ static {
+ for (H5T_class s : EnumSet.allOf(H5T_class.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private long code;
+
+ H5T_class(long layout_type) {
+ this.code = layout_type;
+ }
+
+ public long getCode() {
+ return this.code;
+ }
+
+ public static H5T_class get(long typeclass_id) {
+ return lookup.get(typeclass_id);
+ }
+ }
+
+ // The supporting Sensor_Datatype class.
+ private static class Sensor_Datatype {
+ static int numberMembers = 4;
+ static int[] memberDims = { 1, 1, 1, 1 };
+
+ String[] memberNames = { "Serial number", "Location", "Temperature (F)", "Pressure (inHg)" };
+ long[] memberFileTypes = { HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1, HDF5Constants.H5T_IEEE_F64BE,
+ HDF5Constants.H5T_IEEE_F64BE };
+ static int[] memberStorage = { INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE };
+
+ // Data size is the storage size for the members, not the object.
+ static long getDataSize() {
+ long data_size = 0;
+ for (int indx = 0; indx < numberMembers; indx++)
+ data_size += memberStorage[indx] * memberDims[indx];
+ return data_size;
+ }
+
+ static int getOffset(int memberItem) {
+ int data_offset = 0;
+ for (int indx = 0; indx < memberItem; indx++)
+ data_offset += memberStorage[indx];
+ return data_offset;
+ }
+ }
+
+ private static void CreateDataType() {
+ long file_id = -1;
+ long strtype_id = -1;
+ long filetype_id = -1;
+ Sensor_Datatype datatypes = new Sensor_Datatype();
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create string datatype.
+ try {
+ strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (strtype_id >= 0)
+ H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the compound datatype for the file. Because the standard
+ // types we are using for the file may have different sizes than
+ // the corresponding native types, we must manually calculate the
+ // offset of each member.
+ try {
+ filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
+ if (filetype_id >= 0) {
+ for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
+ long type_id = datatypes.memberFileTypes[indx];
+ if (type_id == HDF5Constants.H5T_C_S1)
+ type_id = strtype_id;
+ H5.H5Tinsert(filetype_id, datatypes.memberNames[indx], Sensor_Datatype.getOffset(indx), type_id);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Commit the compound datatype to the file, creating a named datatype.
+ try {
+ if ((file_id >= 0) && (filetype_id >= 0))
+ H5.H5Tcommit(file_id, DATATYPENAME, filetype_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the str type.
+ try {
+ if (strtype_id >= 0)
+ H5.H5Tclose(strtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataType() {
+ long file_id = -1;
+ long typeclass_id = -1;
+ long filetype_id = -1;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open named datatype.
+ try {
+ if (file_id >= 0)
+ filetype_id = H5.H5Topen(file_id, DATATYPENAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Named datatype: " + DATATYPENAME + ":");
+
+ // Get datatype class. If it isn't compound, we won't print anything.
+ try {
+ if (filetype_id >= 0)
+ typeclass_id = H5.H5Tget_class(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // If the datatype is compound, list its member names.
+ try {
+ if (H5T_class.get(typeclass_id) == H5T_class.H5T_COMPOUND) {
+ System.out.println(" Class: H5T_COMPOUND");
+ int nmembs = H5.H5Tget_nmembers(filetype_id);
+ // Iterate over compound datatype members.
+ for (int indx = 0; indx < nmembs; indx++) {
+ String member_name = H5.H5Tget_member_name(filetype_id, indx);
+ System.out.println(" " + member_name);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_Commit.CreateDataType();
+ // Now we begin the read section of this example. Here we reopen
+ // the file and open the committed datatype by name. Because the
+ // datatype is stored in the file, we can inspect its class and
+ // member names without knowing its definition in advance.
+ H5Ex_T_Commit.ReadDataType();
+ }
+
+}
diff --git a/java/examples/datatypes/H5Ex_T_Compound.java b/java/examples/datatypes/H5Ex_T_Compound.java
new file mode 100644
index 0000000..f270cb9
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_Compound.java
@@ -0,0 +1,443 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write compound
+ datatypes to a dataset. The program first writes
+ compound structures to a dataset with a dataspace of DIM0,
+ then closes the file. Next, it reopens the file, reads
+ back the data, and outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.charset.Charset;
+
+public class H5Ex_T_Compound {
+ private static String FILENAME = "H5Ex_T_Compound.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int RANK = 1;
+ protected static final int INTEGERSIZE = 4;
+ protected static final int DOUBLESIZE = 8;
+ protected final static int MAXSTRINGSIZE = 80;
+
+ static class Sensor_Datatype {
+ static int numberMembers = 4;
+ static int[] memberDims = { 1, 1, 1, 1 };
+
+ static String[] memberNames = { "Serial number", "Location", "Temperature (F)", "Pressure (inHg)" };
+ static long[] memberMemTypes = { HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE };
+ static long[] memberFileTypes = { HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE };
+ static int[] memberStorage = { INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE };
+
+ // Data size is the storage size for the members.
+ static long getTotalDataSize() {
+ long data_size = 0;
+ for (int indx = 0; indx < numberMembers; indx++)
+ data_size += memberStorage[indx] * memberDims[indx];
+ return DIM0 * data_size;
+ }
+
+ static long getDataSize() {
+ long data_size = 0;
+ for (int indx = 0; indx < numberMembers; indx++)
+ data_size += memberStorage[indx] * memberDims[indx];
+ return data_size;
+ }
+
+ static int getOffset(int memberItem) {
+ int data_offset = 0;
+ for (int indx = 0; indx < memberItem; indx++)
+ data_offset += memberStorage[indx];
+ return data_offset;
+ }
+ }
+
+ static class Sensor {
+ public int serial_no;
+ public String location;
+ public double temperature;
+ public double pressure;
+
+ Sensor(int serial_no, String location, double temperature, double pressure) {
+ this.serial_no = serial_no;
+ this.location = location;
+ this.temperature = temperature;
+ this.pressure = pressure;
+ }
+
+ Sensor(ByteBuffer databuf, int dbposition) {
+ readBuffer(databuf, dbposition);
+ }
+
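+ // Pack this sensor record into the byte buffer at the given byte offset,
+ // placing each member at the offset computed from the member storage sizes.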
+ void writeBuffer(ByteBuffer databuf, int dbposition) {
+ databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), serial_no);
+ byte[] temp_str = location.getBytes(Charset.forName("UTF-8"));
+ int arraylen = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
+ for (int ndx = 0; ndx < arraylen; ndx++)
+ databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, temp_str[ndx]);
+ for (int ndx = arraylen; ndx < MAXSTRINGSIZE; ndx++)
+ databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, (byte) 0);
+ databuf.putDouble(dbposition + Sensor_Datatype.getOffset(2), temperature);
+ databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), pressure);
+ }
+
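+ // Unpack a sensor record from the byte buffer, reading each member
+ // from its computed offset.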
+ void readBuffer(ByteBuffer databuf, int dbposition) {
+ this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
+ ByteBuffer stringbuf = databuf.duplicate();
+ stringbuf.position(dbposition + Sensor_Datatype.getOffset(1));
+ stringbuf.limit(dbposition + Sensor_Datatype.getOffset(1) + MAXSTRINGSIZE);
+ byte[] bytearr = new byte[stringbuf.remaining()];
+ stringbuf.get(bytearr);
+ this.location = new String(bytearr, Charset.forName("UTF-8")).trim();
+ this.temperature = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(2));
+ this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
+ }
+
+ @Override
+ public String toString() {
+ return String.format("Serial number : " + serial_no + "%n" +
+ "Location : " + location + "%n" +
+ "Temperature (F) : " + temperature + "%n" +
+ "Pressure (inHg) : " + pressure + "%n");
+ }
+ }
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long strtype_id = -1;
+ long memtype_id = -1;
+ long filetype_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM0 };
+ Sensor[] object_data = new Sensor[DIM0];
+ byte[] dset_data = null;
+
+ // Initialize data.
+ object_data[0] = new Sensor(1153, new String("Exterior (static)"), 53.23, 24.57);
+ object_data[1] = new Sensor(1184, new String("Intake"), 55.12, 22.95);
+ object_data[2] = new Sensor(1027, new String("Intake manifold"), 103.55, 31.23);
+ object_data[3] = new Sensor(1313, new String("Exhaust manifold"), 1252.89, 84.11);
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create string datatype.
+ try {
+ strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (strtype_id >= 0)
+ H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the compound datatype for memory.
+ try {
+ memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
+ if (memtype_id >= 0) {
+ for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
+ long type_id = Sensor_Datatype.memberMemTypes[indx];
+ if (type_id == HDF5Constants.H5T_C_S1)
+ type_id = strtype_id;
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
+ type_id);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the compound datatype for the file. Because the standard
+ // types we are using for the file may have different sizes than
+ // the corresponding native types, we must manually calculate the
+ // offset of each member.
+ try {
+ filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
+ if (filetype_id >= 0) {
+ for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
+ long type_id = Sensor_Datatype.memberFileTypes[indx];
+ if (type_id == HDF5Constants.H5T_C_S1)
+ type_id = strtype_id;
+ H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
+ type_id);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the compound data to the dataset.
+ // allocate memory for read buffer.
+ dset_data = new byte[(int)dims[0] * (int)Sensor_Datatype.getDataSize()];
+ ByteBuffer outBuf = ByteBuffer.wrap(dset_data);
+ outBuf.order(ByteOrder.nativeOrder());
+ for (int indx = 0; indx < (int) dims[0]; indx++) {
+ object_data[indx].writeBuffer(outBuf, indx * (int)Sensor_Datatype.getDataSize());
+ }
+ try {
+ if ((dataset_id >= 0) && (memtype_id >= 0))
+ H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
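+ // Terminate access to the string type.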
+ try {
+ if (strtype_id >= 0)
+ H5.H5Tclose(strtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long strtype_id = -1;
+ long memtype_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM0 };
+ Sensor[] object_data2;
+ byte[] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create string datatype.
+ try {
+ strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (strtype_id >= 0)
+ H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the compound datatype for memory.
+ try {
+ memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
+ if (memtype_id >= 0) {
+ for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
+ long type_id = Sensor_Datatype.memberMemTypes[indx];
+ if (type_id == HDF5Constants.H5T_C_S1)
+ type_id = strtype_id;
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
+ type_id);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // allocate memory for read buffer.
+ dset_data = new byte[(int) dims[0] * (int)Sensor_Datatype.getDataSize()];
+
+ object_data2 = new Sensor[(int) dims[0]];
+
+ // Read data.
+ try {
+ if ((dataset_id >= 0) && (memtype_id >= 0))
+ H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+
+ ByteBuffer inBuf = ByteBuffer.wrap(dset_data);
+ inBuf.order(ByteOrder.nativeOrder());
+ for (int indx = 0; indx < (int) dims[0]; indx++) {
+ object_data2[indx] = new Sensor(inBuf, indx * (int)Sensor_Datatype.getDataSize());
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(DATASETNAME + " [" + indx + "]:");
+ System.out.println(object_data2[indx].toString());
+ }
+ System.out.println();
+
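+ // End access to the dataset and release resources used by it.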
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
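+ // Terminate access to the string type.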
+ if (strtype_id >= 0)
+ H5.H5Tclose(strtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_Compound.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the dataset has the same name and rank, but can have any size.
+ // Therefore we allocate a new array for the read buffer, sized from
+ // the dimensions read back from the file.
+ H5Ex_T_Compound.ReadDataset();
+ }
+
+}
diff --git a/java/examples/datatypes/H5Ex_T_CompoundAttribute.java b/java/examples/datatypes/H5Ex_T_CompoundAttribute.java
new file mode 100644
index 0000000..25581d4
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_CompoundAttribute.java
@@ -0,0 +1,486 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write compound
+ datatypes to an attribute. The program first writes
+ compound structures to an attribute with a dataspace of
+ DIM0, then closes the file. Next, it reopens the file,
+ reads back the data, and outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.charset.Charset;
+
+public class H5Ex_T_CompoundAttribute {
+ private static String FILENAME = "H5Ex_T_CompoundAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int RANK = 1;
+ protected static final int INTEGERSIZE = 4;
+ protected static final int DOUBLESIZE = 8;
+ protected final static int MAXSTRINGSIZE = 80;
+
+ // Using Java Externalization will add a two-byte object header in
+ // the stream, which needs to be called out in the datatypes.
+ static class Sensor_Datatype {
+ static int numberMembers = 4;
+ static int[] memberDims = { 1, 1, 1, 1 };
+
+ static String[] memberNames = { "Serial number", "Location", "Temperature (F)", "Pressure (inHg)" };
+ static long[] memberMemTypes = { HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE };
+ static long[] memberFileTypes = { HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE };
+ static int[] memberStorage = { INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE };
+
+ // Data size is the storage size for the members, not the object.
+ static long getTotalDataSize() {
+ long data_size = 0;
+ for (int indx = 0; indx < numberMembers; indx++)
+ data_size += memberStorage[indx] * memberDims[indx];
+ return DIM0 * data_size;
+ }
+
+ static long getDataSize() {
+ long data_size = 0;
+ for (int indx = 0; indx < numberMembers; indx++)
+ data_size += memberStorage[indx] * memberDims[indx];
+ return data_size;
+ }
+
+ static int getOffset(int memberItem) {
+ int data_offset = 0;
+ for (int indx = 0; indx < memberItem; indx++)
+ data_offset += memberStorage[indx];
+ return data_offset;
+ }
+ }
+
+ static class Sensor {
+ public int serial_no;
+ public String location;
+ public double temperature;
+ public double pressure;
+
+ Sensor(int serial_no, String location, double temperature, double pressure) {
+ this.serial_no = serial_no;
+ this.location = location;
+ this.temperature = temperature;
+ this.pressure = pressure;
+ }
+
+ Sensor(ByteBuffer databuf, int dbposition) {
+ readBuffer(databuf, dbposition);
+ }
+
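+ // Pack this sensor record into the byte buffer at the given byte offset,
+ // placing each member at the offset computed from the member storage sizes.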
+ void writeBuffer(ByteBuffer databuf, int dbposition) {
+ databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), serial_no);
+ byte[] temp_str = location.getBytes(Charset.forName("UTF-8"));
+ int arraylen = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
+ for (int ndx = 0; ndx < arraylen; ndx++)
+ databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, temp_str[ndx]);
+ for (int ndx = arraylen; ndx < MAXSTRINGSIZE; ndx++)
+ databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, (byte) 0);
+ databuf.putDouble(dbposition + Sensor_Datatype.getOffset(2), temperature);
+ databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), pressure);
+ }
+
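+ // Unpack a sensor record from the byte buffer, reading each member
+ // from its computed offset.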
+ void readBuffer(ByteBuffer databuf, int dbposition) {
+ this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
+ ByteBuffer stringbuf = databuf.duplicate();
+ stringbuf.position(dbposition + Sensor_Datatype.getOffset(1));
+ stringbuf.limit(dbposition + Sensor_Datatype.getOffset(1) + MAXSTRINGSIZE);
+ byte[] bytearr = new byte[stringbuf.remaining()];
+ stringbuf.get(bytearr);
+ this.location = new String(bytearr, Charset.forName("UTF-8")).trim();
+ this.temperature = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(2));
+ this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
+ }
+
+ @Override
+ public String toString() {
+ return String.format("Serial number : " + serial_no + "%n" +
+ "Location : " + location + "%n" +
+ "Temperature (F) : " + temperature + "%n" +
+ "Pressure (inHg) : " + pressure + "%n");
+ }
+ }
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long strtype_id = -1;
+ long memtype_id = -1;
+ long filetype_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long[] dims = { DIM0 };
+ Sensor[] object_data = new Sensor[DIM0];
+ byte[] dset_data = null;
+
+ // Initialize data.
+ object_data[0] = new Sensor(1153, new String("Exterior (static)"), 53.23, 24.57);
+ object_data[1] = new Sensor(1184, new String("Intake"), 55.12, 22.95);
+ object_data[2] = new Sensor(1027, new String("Intake manifold"), 103.55, 31.23);
+ object_data[3] = new Sensor(1313, new String("Exhaust manifold"), 1252.89, 84.11);
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create string datatype.
+ try {
+ strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (strtype_id >= 0)
+ H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the compound datatype for memory.
+ try {
+ memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
+ if (memtype_id >= 0) {
+ for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
+ long type_id = Sensor_Datatype.memberMemTypes[indx];
+ if (type_id == HDF5Constants.H5T_C_S1)
+ type_id = strtype_id;
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
+ type_id);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the compound datatype for the file. Because the standard
+ // types we are using for the file may have different sizes than
+ // the corresponding native types, we must manually calculate the
+ // offset of each member.
+ try {
+ filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
+ if (filetype_id >= 0) {
+ for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
+ long type_id = Sensor_Datatype.memberFileTypes[indx];
+ if (type_id == HDF5Constants.H5T_C_S1)
+ type_id = strtype_id;
+ H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
+ type_id);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = -1;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the compound data.
+ dset_data = new byte[(int) dims[0] * (int)Sensor_Datatype.getDataSize()];
+ ByteBuffer outBuf = ByteBuffer.wrap(dset_data);
+ outBuf.order(ByteOrder.nativeOrder());
+ for (int indx = 0; indx < (int) dims[0]; indx++) {
+ object_data[indx].writeBuffer(outBuf, indx * (int)Sensor_Datatype.getDataSize());
+ }
+ try {
+ if ((attribute_id >= 0) && (memtype_id >= 0))
+ H5.H5Awrite(attribute_id, memtype_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
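+ // Terminate access to the string type.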
+ if (strtype_id >= 0)
+ H5.H5Tclose(strtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long strtype_id = -1;
+ long memtype_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long[] dims = { DIM0 };
+ Sensor[] object_data2;
+ byte[] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
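+ // Open the attribute.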
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the dataspace of the attribute and allocate memory for the
+ // read buffer. The buffer size is computed from the dataspace
+ // dimensions and the compound member storage sizes.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create string datatype.
+ try {
+ strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (strtype_id >= 0)
+ H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the compound datatype for memory.
+ try {
+ memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
+ if (memtype_id >= 0) {
+ for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
+ long type_id = Sensor_Datatype.memberMemTypes[indx];
+ if (type_id == HDF5Constants.H5T_C_S1)
+ type_id = strtype_id;
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
+ type_id);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // allocate memory for read buffer.
+ dset_data = new byte[(int) dims[0] * (int)Sensor_Datatype.getDataSize()];
+
+ object_data2 = new Sensor[(int) dims[0]];
+
+ // Read data.
+ try {
+ if ((attribute_id >= 0) && (memtype_id >= 0))
+ H5.H5Aread(attribute_id, memtype_id, dset_data);
+
+ ByteBuffer inBuf = ByteBuffer.wrap(dset_data);
+ inBuf.order(ByteOrder.nativeOrder());
+ for (int indx = 0; indx < (int) dims[0]; indx++) {
+ object_data2[indx] = new Sensor(inBuf, indx * (int)Sensor_Datatype.getDataSize());
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(ATTRIBUTENAME + " [" + indx + "]:");
+ System.out.println(object_data2[indx].toString());
+ }
+ System.out.println();
+
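+ // End access to the attribute and release resources used by it.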
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
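+ // Terminate access to the string type.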
+ if (strtype_id >= 0)
+ H5.H5Tclose(strtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_CompoundAttribute.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the attribute has the same name and rank, but can have any size.
+ // Therefore we allocate a new array for the read buffer, sized from
+ // the dimensions read back from the file.
+ H5Ex_T_CompoundAttribute.ReadDataset();
+ }
+
+}
diff --git a/java/examples/datatypes/H5Ex_T_Float.java b/java/examples/datatypes/H5Ex_T_Float.java
new file mode 100644
index 0000000..1b5fd9b
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_Float.java
@@ -0,0 +1,227 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write floating point datatypes
+ to a dataset. The program first writes floating point numbers to a
+ dataset with a dataspace of DIM0xDIM1, then closes the
+ file. Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import java.text.DecimalFormat;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_Float {
+ private static String FILENAME = "H5Ex_T_Float.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM0, DIM1 };
+ double[][] dset_data = new double[DIM0][DIM1];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < DIM1; jndx++) {
+ dset_data[indx][jndx] = indx / (jndx + 0.5) + jndx;
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset and write the floating point data to it. In
+ // this example we will save the data as 64 bit little endian IEEE
+ // floating point numbers, regardless of the native type. The HDF5
+ // library automatically converts between different floating point
+ // types.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_IEEE_F64LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM0, DIM1 };
+ double[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional array for the read buffer (the
+ // elements to be read).
+ dset_data = new double[(int) dims[0]][(int) (dims[1])];
+
+ // Read data.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ DecimalFormat df = new DecimalFormat("#,##0.0000");
+ System.out.println(DATASETNAME + ":");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [");
+ for (int jndx = 0; jndx < dims[1]; jndx++) {
+ System.out.print(" " + df.format(dset_data[indx][jndx]));
+ }
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_Float.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the dataset and array have the same name and rank, but can have
+ // any size. Therefore we must allocate a new array to read in
+ // the data dynamically.
+ H5Ex_T_Float.ReadDataset();
+ }
+
+}
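
The point of the example above is that the on-disk datatype (H5T_IEEE_F64LE) and the in-memory datatype (H5T_NATIVE_DOUBLE) are chosen independently and HDF5 converts between them. A condensed sketch of just that write path, reusing only the calls shown above (error handling and data initialization omitted; the file name is an assumption):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class FloatWriteSketch {
        public static void main(String[] args) throws Exception {
            long[] dims = { 4, 7 };
            double[][] data = new double[4][7]; // values left at 0.0 for brevity

            long file = H5.H5Fcreate("FloatWriteSketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            long space = H5.H5Screate_simple(2, dims, null);

            // Stored in the file as 64-bit little-endian IEEE floats ...
            long dset = H5.H5Dcreate(file, "DS1", HDF5Constants.H5T_IEEE_F64LE, space,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);

            // ... but written from native doubles; the library converts on the way out.
            H5.H5Dwrite(dset, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
                    HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, data);

            H5.H5Dclose(dset);
            H5.H5Sclose(space);
            H5.H5Fclose(file);
        }
    }
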
diff --git a/java/examples/datatypes/H5Ex_T_FloatAttribute.java b/java/examples/datatypes/H5Ex_T_FloatAttribute.java
new file mode 100644
index 0000000..de1dac7
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_FloatAttribute.java
@@ -0,0 +1,263 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write floating point
+ datatypes to an attribute. The program first writes
+ floating point numbers to an attribute with a dataspace of
+ DIM0xDIM1, then closes the file. Next, it reopens the
+ file, reads back the data, and outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import java.text.DecimalFormat;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_FloatAttribute {
+ private static String FILENAME = "H5Ex_T_FloatAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long[] dims = { DIM0, DIM1 };
+ double[][] dset_data = new double[DIM0][DIM1];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < DIM1; jndx++) {
+ dset_data[indx][jndx] = indx / (jndx + 0.5) + jndx;
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = -1;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute and write the array data to it.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_IEEE_F64LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the attribute.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_DOUBLE, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long[] dims = { DIM0, DIM1 };
+ double[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional array for the read buffer (the
+ // elements to be read).
+ dset_data = new double[(int) dims[0]][(int) (dims[1])];
+
+ // Read data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_DOUBLE, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ DecimalFormat df = new DecimalFormat("#,##0.0000");
+ System.out.println(ATTRIBUTENAME + ":");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [");
+ for (int jndx = 0; jndx < dims[1]; jndx++) {
+ System.out.print(" " + df.format(dset_data[indx][jndx]));
+ }
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_FloatAttribute.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the dataset and array have the same name and rank, but can have
+ // any size. Therefore we must allocate a new array to read in
+ // the data dynamically.
+ H5Ex_T_FloatAttribute.ReadDataset();
+ }
+
+}
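
H5Ex_T_FloatAttribute attaches the floating point data to an attribute of a dataset rather than to the dataset itself: the parent dataset gets a throwaway scalar dataspace, while the 4x7 dataspace and the double data live on the attribute. A condensed sketch of that attach pattern, reusing only calls shown above (file name assumed, error handling omitted):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class FloatAttributeSketch {
        public static void main(String[] args) throws Exception {
            long[] dims = { 4, 7 };
            double[][] data = new double[4][7];

            long file = H5.H5Fcreate("FloatAttributeSketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);

            // Parent object: a small dataset with a scalar dataspace.
            long scalar = H5.H5Screate(HDF5Constants.H5S_SCALAR);
            long dset = H5.H5Dcreate(file, "DS1", HDF5Constants.H5T_STD_I32LE, scalar,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            H5.H5Sclose(scalar);

            // The attribute carries the 4x7 dataspace and the floating point data.
            long space = H5.H5Screate_simple(2, dims, null);
            long attr = H5.H5Acreate(dset, "A1", HDF5Constants.H5T_IEEE_F64LE, space,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            H5.H5Awrite(attr, HDF5Constants.H5T_NATIVE_DOUBLE, data);

            H5.H5Aclose(attr);
            H5.H5Sclose(space);
            H5.H5Dclose(dset);
            H5.H5Fclose(file);
        }
    }
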
diff --git a/java/examples/datatypes/H5Ex_T_Integer.java b/java/examples/datatypes/H5Ex_T_Integer.java
new file mode 100644
index 0000000..2f365cd
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_Integer.java
@@ -0,0 +1,226 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write integer datatypes
+ to a dataset. The program first writes integers to a
+ dataset with a dataspace of DIM0xDIM1, then closes the
+ file. Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import java.text.DecimalFormat;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_Integer {
+ private static String FILENAME = "H5Ex_T_Integer.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM0, DIM1 };
+ int[][] dset_data = new int[DIM0][DIM1];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < DIM1; jndx++) {
+ dset_data[indx][jndx] = indx * jndx - jndx;
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset and write the integer data to it. In this
+ // example we will save the data as 64 bit big endian integers,
+ // regardless of the native integer type. The HDF5 library
+ // automatically converts between different integer types.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I64BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM0, DIM1 };
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional array for the read buffer (the
+ // elements to be read).
+ dset_data = new int[(int) dims[0]][(int) (dims[1])];
+
+ // Read data.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ DecimalFormat df = new DecimalFormat("#,##0");
+ System.out.println(DATASETNAME + ":");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [");
+ for (int jndx = 0; jndx < dims[1]; jndx++) {
+ System.out.print(" " + df.format(dset_data[indx][jndx]));
+ }
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_Integer.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the dataset and array have the same name and rank, but can have
+ // any size. Therefore we must allocate a new array to read in
+ // the data dynamically.
+ H5Ex_T_Integer.ReadDataset();
+ }
+
+}
diff --git a/java/examples/datatypes/H5Ex_T_IntegerAttribute.java b/java/examples/datatypes/H5Ex_T_IntegerAttribute.java
new file mode 100644
index 0000000..c153d99
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_IntegerAttribute.java
@@ -0,0 +1,263 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write integer datatypes
+ to an attribute. The program first writes integers to an
+ attribute with a dataspace of DIM0xDIM1, then closes the
+ file. Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import java.text.DecimalFormat;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_IntegerAttribute {
+ private static String FILENAME = "H5Ex_T_IntegerAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long[] dims = { DIM0, DIM1 };
+ int[][] dset_data = new int[DIM0][DIM1];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < DIM1; jndx++) {
+ dset_data[indx][jndx] = indx * jndx - jndx;
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = -1;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute and write the array data to it.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_I64BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the attribute.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_INT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long[] dims = { DIM0, DIM1 };
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional array for the read buffer (the
+ // elements to be read).
+ dset_data = new int[(int) dims[0]][(int) (dims[1])];
+
+ // Read data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_INT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ DecimalFormat df = new DecimalFormat("#,##0");
+ System.out.println(ATTRIBUTENAME + ":");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [");
+ for (int jndx = 0; jndx < dims[1]; jndx++) {
+ System.out.print(" " + df.format(dset_data[indx][jndx]));
+ }
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_IntegerAttribute.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the dataset and array have the same name and rank, but can have
+ // any size. Therefore we must allocate a new array to read in
+ // the data dynamically.
+ H5Ex_T_IntegerAttribute.ReadDataset();
+ }
+
+}
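
The read half of the attribute examples sizes its buffer from the attribute's own dataspace before reading. A condensed sketch of that read path, reusing only calls shown above and assuming the H5Ex_T_IntegerAttribute.h5 file produced by the example already exists:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class IntegerAttributeReadSketch {
        public static void main(String[] args) throws Exception {
            long file = H5.H5Fopen("H5Ex_T_IntegerAttribute.h5", HDF5Constants.H5F_ACC_RDONLY,
                    HDF5Constants.H5P_DEFAULT);
            long dset = H5.H5Dopen(file, "DS1", HDF5Constants.H5P_DEFAULT);
            long attr = H5.H5Aopen_by_name(dset, ".", "A1", HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT);

            // Size the read buffer from the attribute's dataspace.
            long space = H5.H5Aget_space(attr);
            long[] dims = new long[2];
            H5.H5Sget_simple_extent_dims(space, dims, null);
            int[][] data = new int[(int) dims[0]][(int) dims[1]];

            // Read as native ints, regardless of the stored 64-bit big-endian type.
            H5.H5Aread(attr, HDF5Constants.H5T_NATIVE_INT, data);
            System.out.println("read " + dims[0] + "x" + dims[1] + " values; data[1][2] = " + data[1][2]);

            H5.H5Sclose(space);
            H5.H5Aclose(attr);
            H5.H5Dclose(dset);
            H5.H5Fclose(file);
        }
    }
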
diff --git a/java/examples/datatypes/H5Ex_T_ObjectReference.java b/java/examples/datatypes/H5Ex_T_ObjectReference.java
new file mode 100644
index 0000000..3cad6c9
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_ObjectReference.java
@@ -0,0 +1,347 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write object references
+ to a dataset. The program first creates objects in the
+ file and writes references to those objects to a dataset
+ with a dataspace of DIM0, then closes the file. Next, it
+ reopens the file, dereferences the references, and outputs
+ the names of their targets to the screen.
+ ************************************************************/
+package examples.datatypes;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_ObjectReference {
+ private static String FILENAME = "H5Ex_T_ObjectReference.h5";
+ private static String DATASETNAME = "DS1";
+ private static String DATASETNAME2 = "DS2";
+ private static String GROUPNAME = "G1";
+ private static final int DIM0 = 2;
+ private static final int RANK = 1;
+
+ // Object types that a dereferenced object reference can point to
+ enum H5G_obj {
+ H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */
+ H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */
+ H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */
+ H5G_TYPE(HDF5Constants.H5O_TYPE_NAMED_DATATYPE); /* Object is a named data type */
+ private static final Map<Integer, H5G_obj> lookup = new HashMap<Integer, H5G_obj>();
+
+ static {
+ for (H5G_obj s : EnumSet.allOf(H5G_obj.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5G_obj(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5G_obj get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static void writeObjRef() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long filespace_id = -1;
+ long group_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM0 };
+ byte[][] dset_data = new byte[DIM0][8];
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if ((file_id >= 0) && (dataspace_id >= 0)) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = -1;
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = -1;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a group in the file.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ group_id = -1;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create references to the previously created objects. Passing -1
+ // as space_id causes this parameter to be ignored. Other values
+ // besides valid dataspaces result in an error.
+ try {
+ if (file_id >= 0) {
+ byte rbuf0[] = H5.H5Rcreate(file_id, GROUPNAME, HDF5Constants.H5R_OBJECT, -1);
+ byte rbuf1[] = H5.H5Rcreate(file_id, DATASETNAME2, HDF5Constants.H5R_OBJECT, -1);
+ for (int indx = 0; indx < 8; indx++) {
+ dset_data[0][indx] = rbuf0[indx];
+ dset_data[1][indx] = rbuf1[indx];
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_REF_OBJ, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the object references to it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_STD_REF_OBJ, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readObjRef() {
+ long file_id = -1;
+ long dataset_id = -1;
+ long dataspace_id = -1;
+ int object_type = -1;
+ long object_id = -1;
+ long[] dims = { DIM0 };
+ byte[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional array for the read buffer (the
+ // elements to be read).
+ dset_data = new byte[(int)dims[0]][8];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_STD_REF_OBJ, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(DATASETNAME + "[" + indx + "]:");
+ System.out.print(" ->");
+ // Open the referenced object, get its name and type.
+ try {
+ if (dataset_id >= 0) {
+ object_id = H5.H5Rdereference(dataset_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, dset_data[indx]);
+ object_type = H5.H5Rget_obj_type(dataset_id, HDF5Constants.H5R_OBJECT, dset_data[indx]);
+ }
+ String[] obj_name = new String[1];
+ long name_size = 1;
+ if (object_type >= 0) {
+ // Get the length of the name and retrieve the name.
+ name_size = 1 + H5.H5Iget_name(object_id, obj_name, name_size);
+ }
+ if ((object_id >= 0) && (object_type >= -1)) {
+ switch (H5G_obj.get(object_type)) {
+ case H5G_GROUP:
+ System.out.print("H5G_GROUP");
+ try {
+ if (object_id >= 0)
+ H5.H5Gclose(object_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ break;
+ case H5G_DATASET:
+ System.out.print("H5G_DATASET");
+ try {
+ if (object_id >= 0)
+ H5.H5Dclose(object_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ break;
+ case H5G_TYPE:
+ System.out.print("H5G_TYPE");
+ try {
+ if (object_id >= 0)
+ H5.H5Tclose(object_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ break;
+ default:
+ System.out.print("UNHANDLED");
+ }
+ }
+ // Print the name.
+ if (name_size > 1)
+ System.out.println(": " + obj_name[0]);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ // Write references to a group and a dataset in the file, then
+ // reopen the file, dereference the stored references, and print
+ // the type and name of each referenced object.
+ H5Ex_T_ObjectReference.writeObjRef();
+ H5Ex_T_ObjectReference.readObjRef();
+ }
+
+}
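
An HDF5 object reference is an opaque byte[] that names another object in the same file; H5Rcreate produces it and H5Rget_obj_type/H5Rdereference resolve it again. Below is a minimal round trip reusing only calls shown above; passing the file handle as the location id (the example above passes a dataset id) is an assumption carried over from the C API, where any object in the file serves as a location:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class ObjectReferenceSketch {
        public static void main(String[] args) throws Exception {
            long file = H5.H5Fcreate("ObjectReferenceSketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);

            // Create a group to refer to, then close it again.
            long group = H5.H5Gcreate(file, "G1", HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            H5.H5Gclose(group);

            // The reference itself is just a small byte array.
            byte[] ref = H5.H5Rcreate(file, "G1", HDF5Constants.H5R_OBJECT, -1);

            // Resolve it: query the referenced object's type and reopen it.
            int type = H5.H5Rget_obj_type(file, HDF5Constants.H5R_OBJECT, ref);
            long obj = H5.H5Rdereference(file, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, ref);
            System.out.println("referenced object is a group: "
                    + (type == HDF5Constants.H5O_TYPE_GROUP));

            H5.H5Gclose(obj); // known to be a group in this sketch
            H5.H5Fclose(file);
        }
    }
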
diff --git a/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java b/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java
new file mode 100644
index 0000000..e354029
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java
@@ -0,0 +1,389 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write object references
+ to an attribute. The program first creates objects in the
+ file and writes references to those objects to an
+ attribute with a dataspace of DIM0, then closes the file.
+ Next, it reopens the file, dereferences the references,
+ and outputs the names of their targets to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_ObjectReferenceAttribute {
+ private static String FILENAME = "H5Ex_T_ObjectReferenceAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static String DATASETNAME2 = "DS2";
+ private static String GROUPNAME = "G1";
+ private static final int DIM0 = 2;
+ private static final int RANK = 1;
+
+ // Object types that a dereferenced object reference can point to
+ enum H5G_obj {
+ H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */
+ H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */
+ H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */
+ H5G_TYPE(HDF5Constants.H5O_TYPE_NAMED_DATATYPE); /* Object is a named data type */
+ private static final Map<Integer, H5G_obj> lookup = new HashMap<Integer, H5G_obj>();
+
+ static {
+ for (H5G_obj s : EnumSet.allOf(H5G_obj.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5G_obj(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5G_obj get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long group_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long[] dims = { DIM0 };
+ byte[][] dset_data = new byte[DIM0][8];
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = -1;
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = -1;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a group in the file.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ group_id = -1;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create references to the previously created objects. Passing -1
+ // as space_id causes this parameter to be ignored. Other values
+ // besides valid dataspaces result in an error.
+ try {
+ if (file_id >= 0) {
+ byte rbuf0[] = H5.H5Rcreate(file_id, GROUPNAME, HDF5Constants.H5R_OBJECT, -1);
+ byte rbuf1[] = H5.H5Rcreate(file_id, DATASETNAME2, HDF5Constants.H5R_OBJECT, -1);
+ for (int indx = 0; indx < 8; indx++) {
+ dset_data[0][indx] = rbuf0[indx];
+ dset_data[1][indx] = rbuf1[indx];
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace to serve as the parent
+ // for the attribute.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = -1;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute and write the array data to it.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_REF_OBJ, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the attribute.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Awrite(attribute_id, HDF5Constants.H5T_STD_REF_OBJ, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ int object_type = -1;
+ long object_id = -1;
+ long[] dims = { DIM0 };
+ byte[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional array for the read buffer (the
+ // elements to be read).
+ dset_data = new byte[(int) dims[0]][8];
+
+ // Read data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aread(attribute_id, HDF5Constants.H5T_STD_REF_OBJ, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(ATTRIBUTENAME + "[" + indx + "]:");
+ System.out.print(" ->");
+ // Open the referenced object, get its name and type.
+ try {
+ if (dataset_id >= 0) {
+ object_id = H5.H5Rdereference(dataset_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, dset_data[indx]);
+ object_type = H5.H5Rget_obj_type(dataset_id, HDF5Constants.H5R_OBJECT, dset_data[indx]);
+ }
+ String[] obj_name = new String[1];
+ long name_size = 1;
+ if (object_type >= 0) {
+ // Get the length of the name and retrieve the name.
+ name_size = 1 + H5.H5Iget_name(object_id, obj_name, name_size);
+ }
+ if ((object_id >= 0) && (object_type >= -1)) {
+ switch (H5G_obj.get(object_type)) {
+ case H5G_GROUP:
+ System.out.print("H5G_GROUP");
+ try {
+ if (object_id >= 0)
+ H5.H5Gclose(object_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ break;
+ case H5G_DATASET:
+ System.out.print("H5G_DATASET");
+ try {
+ if (object_id >= 0)
+ H5.H5Dclose(object_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ break;
+ case H5G_TYPE:
+ System.out.print("H5G_TYPE");
+ try {
+ if (object_id >= 0)
+ H5.H5Tclose(object_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ break;
+ default:
+ System.out.print("UNHANDLED");
+ }
+ }
+ // Print the name.
+ if (name_size > 1)
+ System.out.println(": " + obj_name[0]);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_ObjectReferenceAttribute.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the dataset and array have the same name and rank, but can have
+ // any size. Therefore we must allocate a new array to read in
+ // the data dynamically.
+ H5Ex_T_ObjectReferenceAttribute.ReadDataset();
+ }
+
+}
diff --git a/java/examples/datatypes/H5Ex_T_Opaque.java b/java/examples/datatypes/H5Ex_T_Opaque.java
new file mode 100644
index 0000000..eb45686
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_Opaque.java
@@ -0,0 +1,270 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write opaque datatypes
+ to a dataset. The program first writes opaque data to a
+ dataset with a dataspace of DIM0, then closes the file.
+ Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_Opaque {
+ private static String FILENAME = "H5Ex_T_Opaque.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int LEN = 7;
+ private static final int RANK = 1;
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long datatype_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM0 };
+ byte[] dset_data = new byte[DIM0 * LEN];
+ byte[] str_data = { 'O', 'P', 'A', 'Q', 'U', 'E' };
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM0; indx++) {
+ for (int jndx = 0; jndx < LEN - 1; jndx++)
+ dset_data[jndx + indx * LEN] = str_data[jndx];
+ dset_data[LEN - 1 + indx * LEN] = (byte) (indx + '0');
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create opaque datatype and set the tag to something appropriate.
+ // For this example we will write and view the data as a character
+ // array.
+ try {
+ datatype_id = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, (long)LEN);
+ if (datatype_id >= 0)
+ H5.H5Tset_tag(datatype_id, "Character array");
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset using the opaque datatype and dataspace
+ // created above. Opaque data is not converted by the library;
+ // it is stored and read back byte-for-byte.
+ try {
+ if ((file_id >= 0) && (datatype_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, datatype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the opaque data to the dataset.
+ try {
+ if ((dataset_id >= 0) && (datatype_id >= 0))
+ H5.H5Dwrite(dataset_id, datatype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (datatype_id >= 0)
+ H5.H5Tclose(datatype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long datatype_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long type_len = -1;
+ long[] dims = { DIM0 };
+ byte[] dset_data;
+ String tag_name = null;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the datatype and its properties (element size and tag).
+ try {
+ if (dataset_id >= 0)
+ datatype_id = H5.H5Dget_type(dataset_id);
+ if (datatype_id >= 0) {
+ type_len = H5.H5Tget_size(datatype_id);
+ tag_name = H5.H5Tget_tag(datatype_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate buffer.
+ dset_data = new byte[(int) (dims[0] * type_len)];
+
+ // Read data.
+ try {
+ if ((dataset_id >= 0) && (datatype_id >= 0))
+ H5.H5Dread(dataset_id, datatype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Datatype tag for " + DATASETNAME + " is: \"" + tag_name + "\"");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(DATASETNAME + "[" + indx + "]: ");
+ for (int jndx = 0; jndx < type_len; jndx++) {
+ char temp = (char) dset_data[jndx + indx * (int)type_len];
+ System.out.print(temp);
+ }
+ System.out.println("");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (datatype_id >= 0)
+ H5.H5Tclose(datatype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_Opaque.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the dataset and array have the same name and rank, but can have
+ // any size. Therefore we must allocate a new array to read in
+ // the data dynamically.
+ H5Ex_T_Opaque.ReadDataset();
+ }
+
+}
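
The defining feature of an opaque datatype is that HDF5 treats each element as a fixed number of uninterpreted bytes, identified only by a user-supplied tag. A small sketch of creating such a type and reading its tag and element size back, reusing only the datatype calls shown above (the tag string and size mirror the example):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class OpaqueTagSketch {
        public static void main(String[] args) throws Exception {
            // An opaque type is "n raw bytes per element" plus a descriptive tag.
            long opaque = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, 7L);
            H5.H5Tset_tag(opaque, "Character array");

            // The tag and element size travel with the type and can be read back.
            System.out.println("tag:  " + H5.H5Tget_tag(opaque));
            System.out.println("size: " + H5.H5Tget_size(opaque));

            H5.H5Tclose(opaque);
        }
    }
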
diff --git a/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java b/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java
new file mode 100644
index 0000000..e42bfe8
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_OpaqueAttribute.java
@@ -0,0 +1,307 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write opaque datatypes
+ to an attribute. The program first writes opaque data to
+ an attribute with a dataspace of DIM0, then closes the
+ file. Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_OpaqueAttribute {
+ private static String FILENAME = "H5Ex_T_OpaqueAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int LEN = 7;
+ private static final int RANK = 1;
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long datatype_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long[] dims = { DIM0 };
+ byte[] dset_data = new byte[DIM0 * LEN];
+ byte[] str_data = { 'O', 'P', 'A', 'Q', 'U', 'E' };
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM0; indx++) {
+ for (int jndx = 0; jndx < LEN - 1; jndx++)
+ dset_data[jndx + indx * LEN] = str_data[jndx];
+ dset_data[LEN - 1 + indx * LEN] = (byte) (indx + '0');
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = -1;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create opaque datatype and set the tag to something appropriate.
+ // For this example we will write and view the data as a character
+ // array.
+ try {
+ datatype_id = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, (long)LEN);
+ if (datatype_id >= 0)
+ H5.H5Tset_tag(datatype_id, "Character array");
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute and write the array data to it.
+ try {
+ if ((dataset_id >= 0) && (datatype_id >= 0) && (dataspace_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, datatype_id, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Write the opaque data to the attribute.
+ try {
+ if ((attribute_id >= 0) && (datatype_id >= 0))
+ H5.H5Awrite(attribute_id, datatype_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // End access to the attribute and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (datatype_id >= 0)
+ H5.H5Tclose(datatype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long datatype_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long type_len = -1;
+ long[] dims = { DIM0 };
+ byte[] dset_data;
+ String tag_name = null;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get datatype and properties for the datatype.
+ try {
+ if (attribute_id >= 0)
+ datatype_id = H5.H5Aget_type(attribute_id);
+ if (datatype_id >= 0) {
+ type_len = H5.H5Tget_size(datatype_id);
+ tag_name = H5.H5Tget_tag(datatype_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate buffer.
+ dset_data = new byte[(int) (dims[0] * type_len)];
+
+ // Read data.
+ try {
+ if ((attribute_id >= 0) && (datatype_id >= 0))
+ H5.H5Aread(attribute_id, datatype_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Datatype tag for " + ATTRIBUTENAME + " is: \"" + tag_name + "\"");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(ATTRIBUTENAME + "[" + indx + "]: ");
+ for (int jndx = 0; jndx < type_len; jndx++) {
+ char temp = (char) dset_data[jndx + indx * (int)type_len];
+ System.out.print(temp);
+ }
+ System.out.println("");
+ }
+ System.out.println();
+
+        // End access to the attribute and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (datatype_id >= 0)
+ H5.H5Tclose(datatype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_OpaqueAttribute.CreateDataset();
+        // Now we begin the read section of this example. Here we assume
+        // the attribute and array have the same name and rank, but can have
+        // any size. Therefore we must allocate a new array to read in the
+        // data dynamically.
+ H5Ex_T_OpaqueAttribute.ReadDataset();
+ }
+
+}
diff --git a/java/examples/datatypes/H5Ex_T_String.java b/java/examples/datatypes/H5Ex_T_String.java
new file mode 100644
index 0000000..469172d
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_String.java
@@ -0,0 +1,311 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write string datatypes
+ to a dataset. The program first writes strings to a
+ dataset with a dataspace of DIM0, then closes the file.
+ Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_String {
+ private static String FILENAME = "H5Ex_T_String.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int SDIM = 8;
+ private static final int RANK = 1;
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long memtype_id = -1;
+ long filetype_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM0 };
+ byte[][] dset_data = new byte[DIM0][SDIM];
+ StringBuffer[] str_data = { new StringBuffer("Parting"), new StringBuffer("is such"),
+ new StringBuffer("sweet"), new StringBuffer("sorrow.") };
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create file and memory datatypes. For this example we will save
+        // the strings as FORTRAN strings, so they do not need space
+ // for the null terminator in the file.
+ try {
+ filetype_id = H5.H5Tcopy(HDF5Constants.H5T_FORTRAN_S1);
+ if (filetype_id >= 0)
+ H5.H5Tset_size(filetype_id, SDIM - 1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (memtype_id >= 0)
+ H5.H5Tset_size(memtype_id, SDIM);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset and write the string data to it.
+ try {
+ if ((file_id >= 0) && (filetype_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ for (int indx = 0; indx < DIM0; indx++) {
+ for (int jndx = 0; jndx < SDIM; jndx++) {
+ if (jndx < str_data[indx].length())
+ dset_data[indx][jndx] = (byte) str_data[indx].charAt(jndx);
+ else
+ dset_data[indx][jndx] = 0;
+ }
+ }
+ if ((dataset_id >= 0) && (memtype_id >= 0))
+ H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long filetype_id = -1;
+ long memtype_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long sdim = 0;
+ long[] dims = { DIM0 };
+ byte[][] dset_data;
+ StringBuffer[] str_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the datatype and its size.
+ try {
+ if (dataset_id >= 0)
+ filetype_id = H5.H5Dget_type(dataset_id);
+ if (filetype_id >= 0) {
+ sdim = H5.H5Tget_size(filetype_id);
+ sdim++; // Make room for null terminator
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate space for data.
+ dset_data = new byte[(int) dims[0]][(int)sdim];
+ str_data = new StringBuffer[(int) dims[0]];
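+        // Each row of dset_data holds sdim bytes: the fixed-length string read
+        // from the file plus the extra byte reserved above for a null terminator.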
+
+ // Create the memory datatype.
+ try {
+ memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (memtype_id >= 0)
+ H5.H5Tset_size(memtype_id, sdim);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read data.
+ try {
+ if ((dataset_id >= 0) && (memtype_id >= 0))
+ H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ byte[] tempbuf = new byte[(int)sdim];
+ for (int indx = 0; indx < (int) dims[0]; indx++) {
+ for (int jndx = 0; jndx < sdim; jndx++) {
+ tempbuf[jndx] = dset_data[indx][jndx];
+ }
+ str_data[indx] = new StringBuffer(new String(tempbuf).trim());
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(DATASETNAME + " [" + indx + "]: " + str_data[indx]);
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_String.CreateDataset();
+        // Now we begin the read section of this example. Here we assume
+        // the dataset and array have the same name and rank, but can have
+        // any size. Therefore we must allocate a new array to read in the
+        // data dynamically.
+ H5Ex_T_String.ReadDataset();
+ }
+
+}
diff --git a/java/examples/datatypes/H5Ex_T_StringAttribute.java b/java/examples/datatypes/H5Ex_T_StringAttribute.java
new file mode 100644
index 0000000..49361bc
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_StringAttribute.java
@@ -0,0 +1,351 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write string datatypes
+ to an attribute. The program first writes strings to an
+ attribute with a dataspace of DIM0, then closes the file.
+ Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+package examples.datatypes;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_StringAttribute {
+ private static String FILENAME = "H5Ex_T_StringAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int SDIM = 8;
+ private static final int RANK = 1;
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long memtype_id = -1;
+ long filetype_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long[] dims = { DIM0 };
+ byte[][] dset_data = new byte[DIM0][SDIM];
+ StringBuffer[] str_data = { new StringBuffer("Parting"), new StringBuffer("is such"),
+ new StringBuffer("sweet"), new StringBuffer("sorrow.") };
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create file and memory datatypes. For this example we will save
+        // the strings as FORTRAN strings, so they do not need space
+ // for the null terminator in the file.
+ try {
+ filetype_id = H5.H5Tcopy(HDF5Constants.H5T_FORTRAN_S1);
+ if (filetype_id >= 0)
+ H5.H5Tset_size(filetype_id, SDIM - 1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (memtype_id >= 0)
+ H5.H5Tset_size(memtype_id, SDIM);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = -1;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Write the string data to the attribute.
+ try {
+ for (int indx = 0; indx < DIM0; indx++) {
+ for (int jndx = 0; jndx < SDIM; jndx++) {
+ if (jndx < str_data[indx].length())
+ dset_data[indx][jndx] = (byte) str_data[indx].charAt(jndx);
+ else
+ dset_data[indx][jndx] = 0;
+ }
+ }
+ if ((attribute_id >= 0) && (memtype_id >= 0))
+ H5.H5Awrite(attribute_id, memtype_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // End access to the attribute and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ private static void ReadDataset() {
+ long file_id = -1;
+ long filetype_id = -1;
+ long memtype_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long sdim = 0;
+ long[] dims = { DIM0 };
+ byte[][] dset_data;
+ StringBuffer[] str_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the datatype and its size.
+ try {
+ if (attribute_id >= 0)
+ filetype_id = H5.H5Aget_type(attribute_id);
+ if (filetype_id >= 0) {
+ sdim = H5.H5Tget_size(filetype_id);
+ sdim++; // Make room for null terminator
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate space for data.
+ dset_data = new byte[(int) dims[0]][(int)sdim];
+ str_data = new StringBuffer[(int) dims[0]];
+
+ // Create the memory datatype.
+ try {
+ memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (memtype_id >= 0)
+ H5.H5Tset_size(memtype_id, sdim);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read data.
+ try {
+ if ((attribute_id >= 0) && (memtype_id >= 0))
+ H5.H5Aread(attribute_id, memtype_id, dset_data);
+ byte[] tempbuf = new byte[(int)sdim];
+ for (int indx = 0; indx < (int) dims[0]; indx++) {
+ for (int jndx = 0; jndx < sdim; jndx++) {
+ tempbuf[jndx] = dset_data[indx][jndx];
+ }
+ str_data[indx] = new StringBuffer(new String(tempbuf).trim());
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(DATASETNAME + " [" + indx + "]: " + str_data[indx]);
+ }
+ System.out.println();
+
+        // End access to the attribute and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_StringAttribute.CreateDataset();
+        // Now we begin the read section of this example. Here we assume
+        // the attribute and array have the same name and rank, but can have
+        // any size. Therefore we must allocate a new array to read in the
+        // data dynamically.
+ H5Ex_T_StringAttribute.ReadDataset();
+ }
+
+}
diff --git a/java/examples/datatypes/H5Ex_T_VLString.java b/java/examples/datatypes/H5Ex_T_VLString.java
new file mode 100644
index 0000000..c8892ba
--- /dev/null
+++ b/java/examples/datatypes/H5Ex_T_VLString.java
@@ -0,0 +1,138 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+   Creating and writing variable-length strings to a dataset, then reading them back.
+ ************************************************************/
+
+package examples.datatypes;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_VLString
+{
+ private static String FILENAME = "H5Ex_T_VLString.h5";
+ private static String DATASETNAME = "DS1";
+
+ private static void createDataset() {
+ long file_id = -1;
+ long type_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ int rank = 1;
+ String[] str_data = { "Parting", "is such", "sweet", "sorrow." };
+ long[] dims = { str_data.length };
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ type_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ H5.H5Tset_size(type_id, HDF5Constants.H5T_VARIABLE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(rank, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset and write the string data to it.
+ try {
+ if ((file_id >= 0) && (type_id >= 0) && (dataspace_id >= 0)) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, type_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite_VLStrings(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, str_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ H5.H5Sclose(dataspace_id);
+ H5.H5Tclose(type_id);
+ H5.H5Dclose(dataset_id);
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readDataset() {
+ long file_id = -1;
+ long type_id = -1;
+ long dataset_id = -1;
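+        // The read buffer below is sized to match the four strings written by
+        // createDataset(); a more general reader could first query the dataspace
+        // extent (H5.H5Dget_space / H5.H5Sget_simple_extent_dims) to size it.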
+ String[] str_data = { "", "", "", "" };
+
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ type_id = H5.H5Dget_type(dataset_id);
+ H5.H5Dread_VLStrings(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT,
+ str_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ for (int indx = 0; indx < str_data.length; indx++)
+ System.out.println(DATASETNAME + " [" + indx + "]: " + str_data[indx]);
+
+ try {
+ H5.H5Tclose(type_id);
+ H5.H5Dclose(dataset_id);
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_T_VLString.createDataset();
+ H5Ex_T_VLString.readDataset();
+ }
+
+}
diff --git a/java/examples/datatypes/Makefile.am b/java/examples/datatypes/Makefile.am
new file mode 100644
index 0000000..7d95a56
--- /dev/null
+++ b/java/examples/datatypes/Makefile.am
@@ -0,0 +1,78 @@
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+# HDF5 Java Library Examples Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+JAVAROOT = .classes
+
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+pkgpath = examples/datatypes
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
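+# Compile-time class path: the local class directory, the HDF5 Java jar and
+# the slf4j logging jars, followed by any user-supplied CLASSPATH.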
+CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.5.jar:$$CLASSPATH
+
+jarfile = jar$(PACKAGE_TARNAME)datatypes.jar
+
+AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
+
+TESTPACKAGE =
+
+noinst_JAVA = \
+ H5Ex_T_Array.java \
+ H5Ex_T_ArrayAttribute.java \
+ H5Ex_T_Bit.java \
+ H5Ex_T_BitAttribute.java \
+ H5Ex_T_Commit.java \
+ H5Ex_T_Compound.java \
+ H5Ex_T_CompoundAttribute.java \
+ H5Ex_T_Float.java \
+ H5Ex_T_FloatAttribute.java \
+ H5Ex_T_Integer.java \
+ H5Ex_T_IntegerAttribute.java \
+ H5Ex_T_ObjectReference.java \
+ H5Ex_T_ObjectReferenceAttribute.java \
+ H5Ex_T_Opaque.java \
+ H5Ex_T_OpaqueAttribute.java \
+ H5Ex_T_String.java \
+ H5Ex_T_StringAttribute.java \
+ H5Ex_T_VLString.java
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+noinst_DATA = $(jarfile)
+
+.PHONY: classes
+
+check_SCRIPTS = runExample.sh
+TEST_SCRIPT = $(check_SCRIPTS)
+
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class runExample.sh
+
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
+
+include $(top_srcdir)/config/conclude.am
diff --git a/java/examples/datatypes/runExample.sh.in b/java/examples/datatypes/runExample.sh.in
new file mode 100644
index 0000000..f80450e
--- /dev/null
+++ b/java/examples/datatypes/runExample.sh.in
@@ -0,0 +1,400 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+
+top_builddir=@top_builddir@
+top_srcdir=@top_srcdir@
+srcdir=@srcdir@
+
+TESTNAME=EX_Datatypes
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+
+# Set up default variable values if not supplied by the user.
+RM='rm -rf'
+CMP='cmp'
+DIFF='diff -c'
+CP='cp'
+DIRNAME='dirname'
+LS='ls'
+AWK='awk'
+
+nerrors=0
+
+# where the libs exist
+HDFLIB_HOME="$top_srcdir/java/lib"
+BLDLIBDIR="./lib"
+BLDDIR="."
+HDFTEST_HOME="$top_srcdir/java/examples/datatypes"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@datatypes.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# All the library files are copied from the source directory to the test directory.
+# NOTE: Keep this framework to add/remove test files.
+#       This list is also used for checking that the files exist.
+#       Lines starting with '#' (no leading space) are treated as comments.
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$HDFLIB_HOME/slf4j-api-1.7.5.jar
+$HDFLIB_HOME/ext/slf4j-simple-1.7.5.jar
+$top_builddir/src/.libs/libhdf5.*
+$top_builddir/java/src/jni/.libs/libhdf5_java.*
+$top_builddir/java/src/$JARFILE
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Array.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Bit.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_BitAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Commit.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Compound.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Float.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Integer.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_ObjectReference.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_Opaque.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_String.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_StringAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.datatypes.H5Ex_T_VLString.txt
+"
+
+#
+# copy files from source dirs to test dir
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+    # Copy the test files. Use -f to make sure we get a fresh copy.
+ for tstfile in $COPY_LIBFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+            # Skip cp if srcdir is the same as destdir; this occurs when the
+            # build/test is performed in the source dir and
+            # would make cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out this to CREATE expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+    # Skip rm if srcdir is the same as destdir; this occurs when the
+    # build/test is performed in the source dir and
+    # the copy above was skipped.
+ SDIR=`$DIRNAME $HDFLIB_HOME/slf4j-api-1.7.5.jar`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDLIBDIR
+ fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+    # Copy the test files. Use -f to make sure we get a fresh copy.
+ for tstfile in $COPY_DATAFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+            # Skip cp if srcdir is the same as destdir; this occurs when the
+            # build/test is performed in the source dir and
+            # would make cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out this to CREATE expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+ $RM $BLDDIR/examples.datatypes.H5Ex_T_*.txt
+ $RM $BLDDIR/H5Ex_T_*.out
+}
+
+# Print a one-line message left justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+JAVAEXEFLAGS=@H5_JAVAFLAGS@
+
+###############################################################################
+# DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+
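+# Run-time class path: the HDF5 Java jar and slf4j jars copied into $BLDLIBDIR
+# above, plus the example jar built in this directory.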
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-1.7.5.jar:"$BLDLIBDIR"/slf4j-simple-1.7.5.jar:"$TESTJARFILE""
+
+TEST=/usr/bin/test
+if [ ! -x /usr/bin/test ]
+then
+TEST=`which test`
+fi
+
+if $TEST -z "$CLASSPATH"; then
+ CLASSPATH=""
+fi
+CLASSPATH=$CPATH":"$CLASSPATH
+export CLASSPATH
+
+if $TEST -n "$JAVAPATH" ; then
+ PATH=$JAVAPATH":"$PATH
+ export PATH
+fi
+
+if $TEST -e /bin/uname; then
+ os_name=`/bin/uname -s`
+elif $TEST -e /usr/bin/uname; then
+ os_name=`/usr/bin/uname -s`
+else
+ os_name=unknown
+fi
+
+if $TEST -z "$LD_LIBRARY_PATH" ; then
+ LD_LIBRARY_PATH=""
+fi
+
+case $os_name in
+ Darwin)
+ DYLD_LIBRARY_PATH=$BLDLIBDIR:$DYLD_LIBRARY_PATH
+ export DYLD_LIBRARY_PATH
+ LD_LIBRARY_PATH=$DYLD_LIBRARY_PATH
+ ;;
+ *)
+ LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
+ ;;
+esac
+
+export LD_LIBRARY_PATH
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Array"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Array > H5Ex_T_Array.out)
+if diff H5Ex_T_Array.out examples.datatypes.H5Ex_T_Array.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_Array"
+else
+ echo "**FAILED** datatypes.H5Ex_T_Array"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ArrayAttribute"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ArrayAttribute > H5Ex_T_ArrayAttribute.out)
+if diff H5Ex_T_ArrayAttribute.out examples.datatypes.H5Ex_T_ArrayAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_ArrayAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_ArrayAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Bit"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Bit > H5Ex_T_Bit.out)
+if diff H5Ex_T_Bit.out examples.datatypes.H5Ex_T_Bit.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_Bit"
+else
+ echo "**FAILED** datatypes.H5Ex_T_Bit"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_BitAttribute"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_BitAttribute > H5Ex_T_BitAttribute.out)
+if diff H5Ex_T_BitAttribute.out examples.datatypes.H5Ex_T_BitAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_BitAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_BitAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Commit"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Commit > H5Ex_T_Commit.out)
+if diff H5Ex_T_Commit.out examples.datatypes.H5Ex_T_Commit.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_Commit"
+else
+ echo "**FAILED** datatypes.H5Ex_T_Commit"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Compound"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Compound > H5Ex_T_Compound.out)
+if diff H5Ex_T_Compound.out examples.datatypes.H5Ex_T_Compound.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_Compound"
+else
+ echo "**FAILED** datatypes.H5Ex_T_Compound"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_CompoundAttribute"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_CompoundAttribute > H5Ex_T_CompoundAttribute.out)
+if diff H5Ex_T_CompoundAttribute.out examples.datatypes.H5Ex_T_CompoundAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_CompoundAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_CompoundAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Float"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Float > H5Ex_T_Float.out)
+if diff H5Ex_T_Float.out examples.datatypes.H5Ex_T_Float.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_Float"
+else
+ echo "**FAILED** datatypes.H5Ex_T_Float"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_FloatAttribute"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_FloatAttribute > H5Ex_T_FloatAttribute.out)
+if diff H5Ex_T_FloatAttribute.out examples.datatypes.H5Ex_T_FloatAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_FloatAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_FloatAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Integer"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Integer > H5Ex_T_Integer.out)
+if diff H5Ex_T_Integer.out examples.datatypes.H5Ex_T_Integer.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_Integer"
+else
+ echo "**FAILED** datatypes.H5Ex_T_Integer"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_IntegerAttribute"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_IntegerAttribute > H5Ex_T_IntegerAttribute.out)
+if diff H5Ex_T_IntegerAttribute.out examples.datatypes.H5Ex_T_IntegerAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_IntegerAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_IntegerAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReference"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReference > H5Ex_T_ObjectReference.out)
+if diff H5Ex_T_ObjectReference.out examples.datatypes.H5Ex_T_ObjectReference.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_ObjectReference"
+else
+ echo "**FAILED** datatypes.H5Ex_T_ObjectReference"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReferenceAttribute"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReferenceAttribute > H5Ex_T_ObjectReferenceAttribute.out)
+if diff H5Ex_T_ObjectReferenceAttribute.out examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_ObjectReferenceAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_ObjectReferenceAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Opaque"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Opaque > H5Ex_T_Opaque.out)
+if diff H5Ex_T_Opaque.out examples.datatypes.H5Ex_T_Opaque.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_Opaque"
+else
+ echo "**FAILED** datatypes.H5Ex_T_Opaque"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_OpaqueAttribute"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_OpaqueAttribute > H5Ex_T_OpaqueAttribute.out)
+if diff H5Ex_T_OpaqueAttribute.out examples.datatypes.H5Ex_T_OpaqueAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_OpaqueAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_OpaqueAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_String"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_String > H5Ex_T_String.out)
+if diff H5Ex_T_String.out examples.datatypes.H5Ex_T_String.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_String"
+else
+ echo "**FAILED** datatypes.H5Ex_T_String"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_StringAttribute"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_StringAttribute > H5Ex_T_StringAttribute.out)
+if diff H5Ex_T_StringAttribute.out examples.datatypes.H5Ex_T_StringAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_StringAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_StringAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_VLString"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_VLString > H5Ex_T_VLString.out)
+if diff H5Ex_T_VLString.out examples.datatypes.H5Ex_T_VLString.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_VLString"
+else
+ echo "**FAILED** datatypes.H5Ex_T_VLString"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+# Clean up temporary files/directories
+CLEAN_LIBFILES_AND_BLDLIBDIR
+CLEAN_DATAFILES_AND_BLDDIR
+
+# Report test results and exit
+if test $nerrors -eq 0 ; then
+ echo "All $TESTNAME tests passed."
+ exit $EXIT_SUCCESS
+else
+ echo "$TESTNAME tests failed with $nerrors errors."
+ exit $EXIT_FAILURE
+fi
diff --git a/java/examples/groups/CMakeLists.txt b/java/examples/groups/CMakeLists.txt
new file mode 100644
index 0000000..0849f0c
--- /dev/null
+++ b/java/examples/groups/CMakeLists.txt
@@ -0,0 +1,138 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDFJAVA_EXAMPLES_GROUPS Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDF5_JAVA_JNI_BINARY_DIR}
+ ${HDF5_JAVA_HDF5_LIB_DIR}
+)
+
+set (HDF_JAVA_EXAMPLES
+ H5Ex_G_Create
+ H5Ex_G_Iterate
+ H5Ex_G_Compact
+ H5Ex_G_Corder
+ H5Ex_G_Intermediate
+ H5Ex_G_Phase
+ H5Ex_G_Visit
+)
+
+if (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+else (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+endif (WIN32)
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS}")
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
+endforeach (CMAKE_INCLUDE_PATH)
+
+foreach (example ${HDF_JAVA_EXAMPLES})
+ file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt
+ "Main-Class: examples.groups.${example}
+"
+ )
+ add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java)
+ get_target_property (${example}_JAR_FILE ${example} JAR_FILE)
+# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
+ get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
+ add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
+endforeach (example ${HDF_JAVA_EXAMPLES})
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (HDFJAVA_JAR ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${HDFJAVA_JAR}")
+endforeach (HDFJAVA_JAR)
+
+set (HDF_JAVA_TEST_FILES
+ h5ex_g_iterate.h5
+ h5ex_g_visit.h5
+)
+
+foreach (h5_file ${HDF_JAVA_TEST_FILES})
+ set (dest "${PROJECT_BINARY_DIR}/${h5_file}")
+ #message (STATUS " Copying ${h5_file}")
+ add_custom_command (
+ TARGET H5Ex_G_Visit
+ POST_BUILD
+ COMMAND ${CMAKE_COMMAND}
+ ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/${h5_file} ${dest}
+ )
+endforeach (h5_file ${HDF_JAVA_TEST_FILES})
+
+MACRO (ADD_H5_TEST resultfile resultcode)
+ add_test (
+ NAME JAVA_groups-${resultfile}
+ COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
+ -D "TEST_PROGRAM=examples.groups.${resultfile}"
+ -D "TEST_ARGS:STRING=${ARGN}"
+ -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${resultfile}_JAR_FILE}"
+ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}"
+ -D "TEST_FOLDER=${HDFJAVA_EXAMPLES_BINARY_DIR}"
+ -D "TEST_OUTPUT=groups/${resultfile}.out"
+ -D "TEST_EXPECT=${resultcode}"
+ -D "TEST_REFERENCE=groups/${resultfile}.txt"
+ -P "${HDF_RESOURCES_DIR}/jrunTest.cmake"
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (JAVA_groups-${resultfile} PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "JAVA_groups-${resultfile}")
+ENDMACRO (ADD_H5_TEST)
+
+if (BUILD_TESTING)
+ foreach (example ${HDF_JAVA_EXAMPLES})
+ add_test (
+ NAME JAVA_groups-${example}-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${example}.out
+ ${example}.out.err
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (JAVA_groups-${example}-clearall-objects PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "JAVA_groups-${example}-clearall-objects")
+ if (NOT ${example} STREQUAL "H5Ex_G_Iterate" AND NOT ${example} STREQUAL "H5Ex_G_Visit")
+ if (${example} STREQUAL "H5Ex_G_Compact")
+ add_test (
+ NAME JAVA_groups-${example}-clearall-h5s
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}1.h5
+ ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}2.h5
+ )
+ else (${example} STREQUAL "H5Ex_G_Compact")
+ add_test (
+ NAME JAVA_groups-${example}-clearall-h5s
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
+ )
+ endif (${example} STREQUAL "H5Ex_G_Compact")
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (JAVA_groups-${example}-clearall-h5s PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "JAVA_groups-${example}-clearall-h5s")
+ endif (NOT ${example} STREQUAL "H5Ex_G_Iterate" AND NOT ${example} STREQUAL "H5Ex_G_Visit")
+ add_test (
+ NAME JAVA_groups-${example}-copy-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E copy_if_different
+ ${HDFJAVA_EXAMPLES_SOURCE_DIR}/testfiles/examples.groups.${example}.txt
+ ${HDFJAVA_EXAMPLES_GROUPS_BINARY_DIR}/${example}.txt
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (JAVA_groups-${example}-copy-objects PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "JAVA_groups-${example}-copy-objects")
+ ADD_H5_TEST (${example} 0)
+ endforeach (example ${HDF_JAVA_EXAMPLES})
+endif (BUILD_TESTING)
diff --git a/java/examples/groups/H5Ex_G_Compact.java b/java/examples/groups/H5Ex_G_Compact.java
new file mode 100644
index 0000000..ca9b6c8
--- /dev/null
+++ b/java/examples/groups/H5Ex_G_Compact.java
@@ -0,0 +1,266 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+   Creating two files and printing the group storage type and file size of each.
+ ************************************************************/
+
+package examples.groups;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+public class H5Ex_G_Compact {
+
+ private static final String FILE1 = "H5Ex_G_Compact1.h5";
+ private static final String FILE2 = "H5Ex_G_Compact2.h5";
+ private static final String GROUP = "G1";
+
+ enum H5G_storage {
+ H5G_STORAGE_TYPE_UNKNOWN(-1),
+ H5G_STORAGE_TYPE_SYMBOL_TABLE(0),
+ H5G_STORAGE_TYPE_COMPACT(1),
+ H5G_STORAGE_TYPE_DENSE(2);
+
+ private static final Map<Integer, H5G_storage> lookup = new HashMap<Integer, H5G_storage>();
+
+ static {
+ for (H5G_storage s : EnumSet.allOf(H5G_storage.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5G_storage(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5G_storage get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ public static void CreateGroup() {
+ long file_id = -1;
+ long group_id = -1;
+ long fapl_id = -1;
+ H5G_info_t ginfo;
+ long size;
+
+ // Create file 1. This file will use original format groups.
+ try {
+ file_id = H5.H5Fcreate (FILE1, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+        // Create a group in file 1.
+ try {
+ if(file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, GROUP, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain the group info and print the group storage type.
+ try {
+ if(group_id >= 0) {
+ ginfo = H5.H5Gget_info(group_id);
+ System.out.print("Group storage type for " + FILE1 + " is: ");
+ switch (H5G_storage.get(ginfo.storage_type)) {
+ case H5G_STORAGE_TYPE_COMPACT:
+ System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
+ break;
+ case H5G_STORAGE_TYPE_DENSE:
+ System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format
+ break;
+ case H5G_STORAGE_TYPE_SYMBOL_TABLE:
+ System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format
+ break;
+ case H5G_STORAGE_TYPE_UNKNOWN:
+ System.out.println("H5G_STORAGE_TYPE_UNKNOWN");
+ break;
+ default:
+ System.out.println("Storage Type Invalid");
+ break;
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Close file 1.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Re-open file 1 to get the correct file size.
+ try {
+ file_id = H5.H5Fopen(FILE1, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain and print the file size.
+ try {
+ if (file_id >= 0) {
+ size = H5.H5Fget_filesize(file_id);
+ System.out.println("File size for " + FILE1 + " is: " + size + " bytes");
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close FILE1.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set file access property list to allow the latest file format.
+ // This will allow the library to create new compact format groups.
+ try {
+ fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ if (fapl_id >= 0)
+ H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST, HDF5Constants.H5F_LIBVER_LATEST);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ System.out.println();
+ // Create file 2 using the new file access property list.
+ try {
+ file_id = H5.H5Fcreate(FILE2, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, fapl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+        // Create a group in file 2.
+ try {
+ if(file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, GROUP, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain the group info and print the group storage type.
+ try {
+ if (group_id >= 0) {
+ ginfo = H5.H5Gget_info(group_id);
+ System.out.print("Group storage type for " + FILE2 + " is: ");
+ switch (H5G_storage.get(ginfo.storage_type)) {
+ case H5G_STORAGE_TYPE_COMPACT:
+ System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
+ break;
+ case H5G_STORAGE_TYPE_DENSE:
+ System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format
+ break;
+ case H5G_STORAGE_TYPE_SYMBOL_TABLE:
+ System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format
+ break;
+ case H5G_STORAGE_TYPE_UNKNOWN:
+ System.out.println("H5G_STORAGE_TYPE_UNKNOWN");
+ break;
+ default:
+ System.out.println("Storage Type Invalid");
+ break;
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close file 2.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Re-open file 2 to get the correct file size.
+ try {
+ file_id = H5.H5Fopen(FILE2, HDF5Constants.H5F_ACC_RDONLY, fapl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain and print the file size.
+ try {
+ if (file_id >= 0) {
+ size = H5.H5Fget_filesize(file_id);
+ System.out.println("File size for " + FILE2 + " is: " + size + " bytes");
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close FILE2.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_G_Compact.CreateGroup();
+ }
+}
diff --git a/java/examples/groups/H5Ex_G_Corder.java b/java/examples/groups/H5Ex_G_Corder.java
new file mode 100644
index 0000000..95790bf
--- /dev/null
+++ b/java/examples/groups/H5Ex_G_Corder.java
@@ -0,0 +1,121 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+/************************************************************
+ Creating a file with creation properties and traversing the
+ groups in alphabetical and creation order.
+ ************************************************************/
+
+package examples.groups;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+
+public class H5Ex_G_Corder {
+ private static String FILE = "H5Ex_G_Corder.h5";
+
+ private static void CreateGroup() throws Exception {
+ long file_id = -1;
+ long group_id = -1;
+ long subgroup_id = -1;
+ long gcpl_id = -1;
+ int status;
+ H5G_info_t ginfo;
+ int i;
+ String name;
+
+ try {
+ // Create a new file using default properties.
+ file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Create group creation property list and enable link creation order tracking.
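+ // H5P_CRT_ORDER_TRACKED records the creation order of links; H5P_CRT_ORDER_INDEXED additionally
+ // builds an index on that order so links can be iterated by creation order. The two flags are
+ // distinct bits, so adding them (as done below) is equivalent to OR-ing them together.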
+ gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
+ status = H5.H5Pset_link_creation_order(gcpl_id, HDF5Constants.H5P_CRT_ORDER_TRACKED
+ + HDF5Constants.H5P_CRT_ORDER_INDEXED);
+
+ // Create primary group using the property list.
+ if (status >= 0)
+ group_id = H5.H5Gcreate(file_id, "index_group", HDF5Constants.H5P_DEFAULT, gcpl_id,
+ HDF5Constants.H5P_DEFAULT);
+
+ try {
+ /*
+ * Create subgroups in the primary group. These will be tracked by creation order. Note that these
+ * groups do not have to have the creation order tracking property set.
+ */
+ subgroup_id = H5.H5Gcreate(group_id, "H", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+ subgroup_id = H5.H5Gcreate(group_id, "D", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+ subgroup_id = H5.H5Gcreate(group_id, "F", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+ subgroup_id = H5.H5Gcreate(group_id, "5", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+
+ // Get group info.
+ ginfo = H5.H5Gget_info(group_id);
+
+ // Traverse links in the primary group using alphabetical indices (H5_INDEX_NAME).
+ System.out.println("Traversing group using alphabetical indices:");
+ for (i = 0; i < ginfo.nlinks; i++) {
+ // Retrieve the name of the ith link in a group
+ name = H5.H5Lget_name_by_idx(group_id, ".", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC,
+ i, HDF5Constants.H5P_DEFAULT);
+ System.out.println("Index " + i + ": " + name);
+ }
+
+ // Traverse links in the primary group by creation order (H5_INDEX_CRT_ORDER).
+ System.out.println("Traversing group using creation order indices:");
+ for (i = 0; i < ginfo.nlinks; i++) {
+ // Retrieve the name of the ith link in a group
+ name = H5.H5Lget_name_by_idx(group_id, ".", HDF5Constants.H5_INDEX_CRT_ORDER,
+ HDF5Constants.H5_ITER_INC, i, HDF5Constants.H5P_DEFAULT);
+ System.out.println("Index " + i + ": " + name);
+ }
+
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ // Close and release resources.
+ if (gcpl_id >= 0)
+ H5.H5Pclose(gcpl_id);
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ }
+
+ public static void main(String[] args) {
+ try {
+ H5Ex_G_Corder.CreateGroup();
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ }
+
+}
diff --git a/java/examples/groups/H5Ex_G_Create.java b/java/examples/groups/H5Ex_G_Create.java
new file mode 100644
index 0000000..1902d86
--- /dev/null
+++ b/java/examples/groups/H5Ex_G_Create.java
@@ -0,0 +1,94 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create, open, and close a group.
+ ************************************************************/
+
+package examples.groups;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_G_Create {
+ private static String FILENAME = "H5Ex_G_Create.h5";
+ private static String GROUPNAME = "G1";
+
+ private static void CreateGroup() {
+ long file_id = -1;
+ long group_id = -1;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a group in the file.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group. The handle group_id can no longer be used.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Re-open the group, obtaining a new handle.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gopen(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_G_Create.CreateGroup();
+ }
+
+}
diff --git a/java/examples/groups/H5Ex_G_Intermediate.java b/java/examples/groups/H5Ex_G_Intermediate.java
new file mode 100644
index 0000000..a3d620b
--- /dev/null
+++ b/java/examples/groups/H5Ex_G_Intermediate.java
@@ -0,0 +1,125 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create intermediate groups with
+ a single call to H5Gcreate.
+ ************************************************************/
+package examples.groups;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5O_iterate_cb;
+import hdf.hdf5lib.callbacks.H5O_iterate_t;
+import hdf.hdf5lib.structs.H5O_info_t;
+
+import java.util.ArrayList;
+
+public class H5Ex_G_Intermediate {
+
+ private static String FILE = "H5Ex_G_Intermediate.h5";
+
+ private void CreateGroup() throws Exception {
+
+ long file_id = -1;
+ long group_id = -1;
+ long gcpl_id = -1;
+
+ try {
+ // Create a new file using the default properties.
+ file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Create a link creation property list and set it to allow creation of intermediate groups.
+ gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
+ H5.H5Pset_create_intermediate_group(gcpl_id, true);
+
+ /*
+ * Create the group /G1/G2/G3. Note that /G1 and /G1/G2 do not exist yet. This call would cause an error
+ * if we did not use the previously created property list.
+ */
+ group_id = H5
+ .H5Gcreate(file_id, "/G1/G2/G3", gcpl_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ // Print all the objects in the file to show that the intermediate groups have been created.
+ System.out.println("Objects in the file_id:");
+
+ // H5O_iterate_t iter_data = null;
+ H5O_iterate_t iter_data = new H5O_iter_data();
+ H5O_iterate_cb iter_cb = new H5O_iter_callback();
+
+ H5.H5Ovisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb, iter_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ // Close and release resources.
+ if (gcpl_id >= 0)
+ H5.H5Pclose(gcpl_id);
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ }
+
+ public static void main(String[] args) {
+ try {
+ (new H5Ex_G_Intermediate()).CreateGroup();
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ private class idata {
+ public String link_name = null;
+ public int link_type = -1;
+
+ idata(String name, int type) {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+
+ private class H5O_iter_data implements H5O_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+
+ private class H5O_iter_callback implements H5O_iterate_cb {
+ public int callback(long group, String name, H5O_info_t info, H5O_iterate_t op_data) {
+ idata id = new idata(name, info.type);
+ ((H5O_iter_data) op_data).iterdata.add(id);
+
+ System.out.print("/"); /* Print root group in object path */
+
+ // Check if the current object is the root group, and if not print the full path name and type.
+
+ if (name.charAt(0) == '.') /* Root group, do not print '.' */
+ System.out.println(" (Group)");
+ else if (info.type == HDF5Constants.H5O_TYPE_GROUP)
+ System.out.println(name + " (Group)");
+ else if (info.type == HDF5Constants.H5O_TYPE_DATASET)
+ System.out.println(name + " (Dataset)");
+ else if (info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE)
+ System.out.println(name + " (Datatype)");
+ else
+ System.out.println(name + " (Unknown)");
+
+ return 0;
+ }
+ }
+
+}
diff --git a/java/examples/groups/H5Ex_G_Iterate.java b/java/examples/groups/H5Ex_G_Iterate.java
new file mode 100644
index 0000000..d0b4ea4
--- /dev/null
+++ b/java/examples/groups/H5Ex_G_Iterate.java
@@ -0,0 +1,119 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to iterate over group members using
+ H5Gget_obj_info_all.
+ ************************************************************/
+package examples.groups;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+public class H5Ex_G_Iterate {
+ private static String FILENAME = "groups/h5ex_g_iterate.h5";
+ private static String DATASETNAME = "/";
+
+ enum H5O_type {
+ H5O_TYPE_UNKNOWN(-1), // Unknown object type
+ H5O_TYPE_GROUP(0), // Object is a group
+ H5O_TYPE_DATASET(1), // Object is a dataset
+ H5O_TYPE_NAMED_DATATYPE(2), // Object is a named data type
+ H5O_TYPE_NTYPES(3); // Number of different object types
+ private static final Map<Integer, H5O_type> lookup = new HashMap<Integer, H5O_type>();
+
+ static {
+ for (H5O_type s : EnumSet.allOf(H5O_type.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5O_type(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5O_type get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static void do_iterate() {
+ long file_id = -1;
+
+ // Open a file using default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Begin iteration.
+ System.out.println("Objects in root group:");
+ try {
+ if (file_id >= 0) {
+ int count = (int) H5.H5Gn_members(file_id, DATASETNAME);
+ String[] oname = new String[count];
+ int[] otype = new int[count];
+ int[] ltype = new int[count];
+ long[] orefs = new long[count];
+ H5.H5Gget_obj_info_all(file_id, DATASETNAME, oname, otype, ltype, orefs, HDF5Constants.H5_INDEX_NAME);
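+ // oname, otype, ltype and orefs now hold each member's name, object type, link type and
+ // object reference, indexed by name (H5_INDEX_NAME).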
+
+ // Get type of the object and display its name and type.
+ for (int indx = 0; indx < otype.length; indx++) {
+ switch (H5O_type.get(otype[indx])) {
+ case H5O_TYPE_GROUP:
+ System.out.println(" Group: " + oname[indx]);
+ break;
+ case H5O_TYPE_DATASET:
+ System.out.println(" Dataset: " + oname[indx]);
+ break;
+ case H5O_TYPE_NAMED_DATATYPE:
+ System.out.println(" Datatype: " + oname[indx]);
+ break;
+ default:
+ System.out.println(" Unknown: " + oname[indx]);
+ }
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_G_Iterate.do_iterate();
+ }
+
+}
diff --git a/java/examples/groups/H5Ex_G_Phase.java b/java/examples/groups/H5Ex_G_Phase.java
new file mode 100644
index 0000000..f23d6f2
--- /dev/null
+++ b/java/examples/groups/H5Ex_G_Phase.java
@@ -0,0 +1,241 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to set the conditions for
+ conversion between compact and dense (indexed) groups.
+ ************************************************************/
+package examples.groups;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+public class H5Ex_G_Phase {
+ private static String FILE = "H5Ex_G_Phase.h5";
+ private static int MAX_GROUPS = 7;
+ private static int MAX_COMPACT = 5;
+ private static int MIN_DENSE = 3;
+
+ enum H5G_storage {
+ H5G_STORAGE_TYPE_UNKNOWN(-1),
+ H5G_STORAGE_TYPE_SYMBOL_TABLE(0),
+ H5G_STORAGE_TYPE_COMPACT(1),
+ H5G_STORAGE_TYPE_DENSE(2);
+
+ private static final Map<Integer, H5G_storage> lookup = new HashMap<Integer, H5G_storage>();
+
+ static {
+ for (H5G_storage s : EnumSet.allOf(H5G_storage.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5G_storage(int layout_type) {
+ this.code = layout_type;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+
+ public static H5G_storage get(int code) {
+ return lookup.get(code);
+ }
+ }
+
+ private static void CreateGroup() {
+ long file_id = -1;
+ long group_id = -1;
+ long subgroup_id = -1;
+ long fapl_id = -1;
+ long gcpl_id = -1;
+ H5G_info_t ginfo;
+ String name = "G0"; // Name of subgroup_id
+ int i;
+
+ // Set file access property list to allow the latest file format. This will allow the library to create new
+ // format groups.
+ try {
+ fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ if (fapl_id >= 0)
+ H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST, HDF5Constants.H5F_LIBVER_LATEST);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a group creation property list and set the link phase change conditions.
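+ // MAX_COMPACT is the largest number of links stored compactly in the group header; once the count
+ // exceeds it the group converts to dense (indexed) storage, and it converts back to compact storage
+ // when the link count drops below MIN_DENSE.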
+ try {
+ gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
+ if (gcpl_id >= 0)
+ H5.H5Pset_link_phase_change(gcpl_id, MAX_COMPACT, MIN_DENSE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a new file using the file access property list.
+ try {
+ if (fapl_id >= 0)
+ file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, fapl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create primary group.
+ try {
+ if ((file_id >= 0) && (gcpl_id >= 0))
+ group_id = H5.H5Gcreate(file_id, name, HDF5Constants.H5P_DEFAULT, gcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Add subgroups to "group" one at a time, printing the storage type for "group" after each subgroup is created.
+ for (i = 1; i <= MAX_GROUPS; i++) {
+ // Define the subgroup name and create the subgroup.
+ char append = (char) (((char) i) + '0');
+ name = name + append; /* G01, G012, G0123, etc. */
+ try {
+ if (group_id >= 0) {
+ subgroup_id = H5.H5Gcreate(group_id, name, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ H5.H5Gclose(subgroup_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain the group info and print the group storage type
+ try {
+ if (group_id >= 0) {
+ ginfo = H5.H5Gget_info(group_id);
+ System.out.print(ginfo.nlinks + " Group" + (ginfo.nlinks == 1 ? " " : "s") + ": Storage type is ");
+ switch (H5G_storage.get(ginfo.storage_type)) {
+ case H5G_STORAGE_TYPE_COMPACT:
+ System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
+ break;
+ case H5G_STORAGE_TYPE_DENSE:
+ System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format
+ break;
+ case H5G_STORAGE_TYPE_SYMBOL_TABLE:
+ System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format
+ break;
+ case H5G_STORAGE_TYPE_UNKNOWN:
+ System.out.println("H5G_STORAGE_TYPE_UNKNOWN");
+ break;
+ default:
+ System.out.println("Storage Type Invalid");
+ break;
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ System.out.println();
+
+ // Delete subgroups one at a time, printing the storage type for "group" after each subgroup is deleted.
+ for (i = MAX_GROUPS; i >= 1; i--) {
+ // Delete the subgroup, then shorten the name for the next iteration.
+ try {
+ H5.H5Ldelete(group_id, name, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ name = name.substring(0, i + 1);
+
+ // Obtain the group info and print the group storage type
+ try {
+ if (group_id >= 0) {
+ ginfo = H5.H5Gget_info(group_id);
+ System.out.print(ginfo.nlinks + " Group" + (ginfo.nlinks == 1 ? " " : "s") + ": Storage type is ");
+ switch (H5G_storage.get(ginfo.storage_type)) {
+ case H5G_STORAGE_TYPE_COMPACT:
+ System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
+ break;
+ case H5G_STORAGE_TYPE_DENSE:
+ System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format
+ break;
+ case H5G_STORAGE_TYPE_SYMBOL_TABLE:
+ System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format
+ break;
+ case H5G_STORAGE_TYPE_UNKNOWN:
+ System.out.println("H5G_STORAGE_TYPE_UNKNOWN");
+ break;
+ default:
+ System.out.println("Storage Type Invalid");
+ break;
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // Close and release resources
+ try {
+ if (fapl_id >= 0)
+ H5.H5Pclose(fapl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (gcpl_id >= 0)
+ H5.H5Pclose(gcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5Ex_G_Phase.CreateGroup();
+ }
+
+}
diff --git a/java/examples/groups/H5Ex_G_Traverse.java b/java/examples/groups/H5Ex_G_Traverse.java
new file mode 100644
index 0000000..b00fe97
--- /dev/null
+++ b/java/examples/groups/H5Ex_G_Traverse.java
@@ -0,0 +1,167 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+This example shows a way to recursively traverse the file
+using H5Literate. The method shown here guarantees that
+the recursion will not enter an infinite loop, but does
+not prevent objects from being visited more than once.
+The program prints the directory structure of the file
+specified in FILE. The default file used by this example
+implements the structure described in the User's Guide,
+chapter 4, figure 26.
+ ************************************************************/
+package examples.groups;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5L_iterate_cb;
+import hdf.hdf5lib.callbacks.H5L_iterate_t;
+import hdf.hdf5lib.structs.H5L_info_t;
+import hdf.hdf5lib.structs.H5O_info_t;
+import examples.groups.H5Ex_G_Iterate.H5O_type;
+
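+// Operator data for H5Literate: each node records the recursion depth and the address of a visited
+// group, forming a linked list that group_check() walks to detect cycles.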
+class opdata implements H5L_iterate_t {
+ int recurs;
+ opdata prev;
+ long addr;
+}
+
+public class H5Ex_G_Traverse {
+
+ private static String FILE = "h5ex_g_traverse.h5";
+ public static H5L_iterate_cb iter_cb = new H5L_iter_callbackT();
+
+ private static void OpenGroup() {
+ long file_id = -1;
+ H5O_info_t infobuf;
+ opdata od = new opdata();
+
+ // Open file and initialize the operator data structure.
+ try {
+ file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ if (file_id >= 0) {
+ infobuf = H5.H5Oget_info(file_id);
+ od.recurs = 0;
+ od.prev = null;
+ od.addr = infobuf.addr;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Print the root group and formatting, begin iteration.
+ try {
+ System.out.println("/ {");
+ // H5L_iterate_cb iter_cb = new H5L_iter_callbackT();
+ H5.H5Literate(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, 0L, iter_cb, od);
+ System.out.println("}");
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close and release resources.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5Ex_G_Traverse.OpenGroup();
+ }
+}
+
+class H5L_iter_callbackT implements H5L_iterate_cb {
+ public int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+
+ H5O_info_t infobuf;
+ int return_val = 0;
+ opdata od = (opdata) op_data; // Type conversion
+ int spaces = 2 * (od.recurs + 1); // Number of white spaces to prepend to output.
+
+ // Get type of the object and display its name and type.
+ // The name of the object is passed to this function by the Library.
+ try {
+ infobuf = H5.H5Oget_info_by_name(group, name, HDF5Constants.H5P_DEFAULT);
+
+ for (int i = 0; i < spaces; i++)
+ System.out.print(" "); // Format output.
+ switch (H5O_type.get(infobuf.type)) {
+ case H5O_TYPE_GROUP:
+ System.out.println("Group: " + name + " { ");
+ // Check group address against linked list of operator
+ // data structures. We will always run the check, as the
+ // reference count cannot be relied upon if there are
+ // symbolic links, and H5Oget_info_by_name always follows
+ // symbolic links. Alternatively we could use H5Lget_info
+ // and never recurse on groups discovered by symbolic
+ // links, however it could still fail if an object's
+ // reference count was manually manipulated with
+ // H5Odecr_refcount.
+ if (group_check(od, infobuf.addr)) {
+ for (int i = 0; i < spaces; i++)
+ System.out.print(" ");
+ System.out.println(" Warning: Loop detected!");
+ }
+ else {
+ // Initialize new object of type opdata and begin
+ // recursive iteration on the discovered
+ // group. The new opdata is given a pointer to the
+ // current one.
+ opdata nextod = new opdata();
+ nextod.recurs = od.recurs + 1;
+ nextod.prev = od;
+ nextod.addr = infobuf.addr;
+ H5L_iterate_cb iter_cb2 = new H5L_iter_callbackT();
+ return_val = H5.H5Literate_by_name(group, name, HDF5Constants.H5_INDEX_NAME,
+ HDF5Constants.H5_ITER_NATIVE, 0L, iter_cb2, nextod, HDF5Constants.H5P_DEFAULT);
+ }
+ for (int i = 0; i < spaces; i++)
+ System.out.print(" ");
+ System.out.println("}");
+ break;
+ case H5O_TYPE_DATASET:
+ System.out.println("Dataset: " + name);
+ break;
+ case H5O_TYPE_NAMED_DATATYPE:
+ System.out.println("Datatype: " + name);
+ break;
+ default:
+ System.out.println("Unknown: " + name);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ return return_val;
+ }
+
+ public boolean group_check(opdata od, long target_addr) {
+ if (od.addr == target_addr)
+ return true; // Addresses match
+ else if (od.recurs == 0)
+ return false; // Root group reached with no matches
+ else
+ return group_check(od.prev, target_addr); // Recursively examine the next node
+ }
+
+}
diff --git a/java/examples/groups/H5Ex_G_Visit.java b/java/examples/groups/H5Ex_G_Visit.java
new file mode 100644
index 0000000..c2367a6
--- /dev/null
+++ b/java/examples/groups/H5Ex_G_Visit.java
@@ -0,0 +1,152 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to recursively traverse a file
+ using H5Ovisit and H5Lvisit. The program prints all of
+ the objects in the file specified in FILE, then prints all
+ of the links in that file. The default file used by this
+ example implements the structure described in the User's
+ Guide, chapter 4, figure 26.
+ ************************************************************/
+package examples.groups;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5L_iterate_cb;
+import hdf.hdf5lib.callbacks.H5L_iterate_t;
+import hdf.hdf5lib.callbacks.H5O_iterate_cb;
+import hdf.hdf5lib.callbacks.H5O_iterate_t;
+import hdf.hdf5lib.structs.H5L_info_t;
+import hdf.hdf5lib.structs.H5O_info_t;
+
+import java.util.ArrayList;
+
+public class H5Ex_G_Visit {
+
+ private static String FILE = "groups/h5ex_g_visit.h5";
+
+ public static void main(String[] args) {
+ try {
+ (new H5Ex_G_Visit()).VisitGroup();
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ private void VisitGroup() throws Exception {
+
+ long file_id = -1;
+
+ try {
+ // Open file
+ file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+
+ // Begin iteration using H5Ovisit
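+ // H5Ovisit calls the callback once per object in the file, while H5Lvisit (used below) calls it
+ // once per link, so the two listings can differ when several links point to the same object.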
+ System.out.println("Objects in the file:");
+ H5O_iterate_t iter_data = new H5O_iter_data();
+ H5O_iterate_cb iter_cb = new H5O_iter_callback();
+ H5.H5Ovisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb, iter_data);
+ System.out.println();
+ // Repeat the same process using H5Lvisit
+ H5L_iterate_t iter_data2 = new H5L_iter_data();
+ H5L_iterate_cb iter_cb2 = new H5L_iter_callback();
+ System.out.println("Links in the file:");
+ H5.H5Lvisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb2, iter_data2);
+
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ // Close and release resources.
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ }
+
+ /************************************************************
+ * Operator function for H5Lvisit. This function simply retrieves the info for the object the current link points
+ * to, and calls the operator function for H5Ovisit.
+ ************************************************************/
+
+ private class idata {
+ public String link_name = null;
+ public int link_type = -1;
+
+ idata(String name, int type) {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+
+ private class H5L_iter_data implements H5L_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+
+ private class H5L_iter_callback implements H5L_iterate_cb {
+ public int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+
+ idata id = new idata(name, info.type);
+ ((H5L_iter_data) op_data).iterdata.add(id);
+
+ H5O_info_t infobuf;
+ int ret = 0;
+ try {
+ // Get type of the object and display its name and type. The name of the object is passed to this
+ // function by the Library.
+ infobuf = H5.H5Oget_info_by_name(group, name, HDF5Constants.H5P_DEFAULT);
+ H5O_iterate_cb iter_cbO = new H5O_iter_callback();
+ H5O_iterate_t iter_dataO = new H5O_iter_data();
+ ret = iter_cbO.callback(group, name, infobuf, iter_dataO);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ return ret;
+ }
+ }
+
+ private class H5O_iter_data implements H5O_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+
+ private class H5O_iter_callback implements H5O_iterate_cb {
+ public int callback(long group, String name, H5O_info_t info, H5O_iterate_t op_data) {
+ idata id = new idata(name, info.type);
+ ((H5O_iter_data) op_data).iterdata.add(id);
+
+ System.out.print("/"); /* Print root group in object path */
+
+ // Check if the current object is the root group, and if not print the full path name and type.
+
+ if (name.charAt(0) == '.') /* Root group, do not print '.' */
+ System.out.println(" (Group)");
+ else if (info.type == HDF5Constants.H5O_TYPE_GROUP)
+ System.out.println(name + " (Group)");
+ else if (info.type == HDF5Constants.H5O_TYPE_DATASET)
+ System.out.println(name + " (Dataset)");
+ else if (info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE)
+ System.out.println(name + " (Datatype)");
+ else
+ System.out.println(name + " (Unknown)");
+
+ return 0;
+ }
+ }
+
+}
diff --git a/java/examples/groups/Makefile.am b/java/examples/groups/Makefile.am
new file mode 100644
index 0000000..be15b42
--- /dev/null
+++ b/java/examples/groups/Makefile.am
@@ -0,0 +1,68 @@
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+# HDF5 Java Library Examples Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+JAVAROOT = .classes
+
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+pkgpath = examples/groups
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.5.jar:$$CLASSPATH
+
+jarfile = jar$(PACKAGE_TARNAME)groups.jar
+
+AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
+
+TESTPACKAGE =
+
+noinst_JAVA = \
+ H5Ex_G_Create.java \
+ H5Ex_G_Iterate.java \
+ H5Ex_G_Compact.java \
+ H5Ex_G_Corder.java \
+ H5Ex_G_Intermediate.java \
+ H5Ex_G_Phase.java \
+ H5Ex_G_Visit.java
+
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+noinst_DATA = $(jarfile)
+
+.PHONY: classes
+
+check_SCRIPTS = runExample.sh
+TEST_SCRIPT = $(check_SCRIPTS)
+
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class runExample.sh
+
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
+
+include $(top_srcdir)/config/conclude.am
diff --git a/java/examples/groups/h5ex_g_iterate.h5 b/java/examples/groups/h5ex_g_iterate.h5
new file mode 100644
index 0000000..e462703
--- /dev/null
+++ b/java/examples/groups/h5ex_g_iterate.h5
Binary files differ
diff --git a/java/examples/groups/h5ex_g_visit.h5 b/java/examples/groups/h5ex_g_visit.h5
new file mode 100644
index 0000000..d8267b1
--- /dev/null
+++ b/java/examples/groups/h5ex_g_visit.h5
Binary files differ
diff --git a/java/examples/groups/runExample.sh.in b/java/examples/groups/runExample.sh.in
new file mode 100644
index 0000000..7dddc6d
--- /dev/null
+++ b/java/examples/groups/runExample.sh.in
@@ -0,0 +1,341 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+
+top_builddir=@top_builddir@
+top_srcdir=@top_srcdir@
+srcdir=@srcdir@
+
+TESTNAME=EX_Groups
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+
+# Set up default variable values if not supplied by the user.
+RM='rm -rf'
+CMP='cmp'
+DIFF='diff -c'
+CP='cp'
+DIRNAME='dirname'
+LS='ls'
+AWK='awk'
+
+nerrors=0
+
+# where the libs exist
+HDFLIB_HOME="$top_srcdir/java/lib"
+BLDLIBDIR="./lib"
+BLDREFDIR="./groups"
+BLDDIR="."
+HDFTEST_HOME="$top_srcdir/java/examples/groups"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@groups.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+test -d $BLDREFDIR || mkdir -p $BLDREFDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# All the library files are copied from the source directory to the test directory.
+# NOTE: Keep this framework to add/remove test files.
+# This list is also used to check that the files exist.
+# A '#' at the start of an entry (with no leading space) comments it out.
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$HDFLIB_HOME/slf4j-api-1.7.5.jar
+$HDFLIB_HOME/ext/slf4j-simple-1.7.5.jar
+$top_builddir/src/.libs/libhdf5.*
+$top_builddir/java/src/jni/.libs/libhdf5_java.*
+$top_builddir/java/src/$JARFILE
+"
+LIST_REF_FILES="
+$HDFTEST_HOME/h5ex_g_iterate.h5
+$HDFTEST_HOME/h5ex_g_visit.h5
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Create.txt
+$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Iterate.txt
+$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Compact.txt
+$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Corder.txt
+$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Intermediate.txt
+$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Phase.txt
+$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Visit.txt
+"
+
+#
+# copy files from source dirs to test dir
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+ # Copy test files. Use -f to make sure we get a new copy.
+ for tstfile in $COPY_LIBFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip cp if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source dir and would make the
+ # cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out the exit below to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+ # Skip rm if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source dir, in which case the
+ # files were never copied.
+ SDIR=`$DIRNAME $HDFLIB_HOME/slf4j-api-1.7.5.jar`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDLIBDIR
+ fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+ # Copy test files. Use -f to make sure we get a new copy.
+ for tstfile in $COPY_DATAFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip cp if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source dir and would make the
+ # cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out the exit below to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+ $RM $BLDDIR/examples.groups.H5Ex_G_*.txt
+ $RM $BLDDIR/H5Ex_G_*.out
+}
+
+COPY_REFFILES="$LIST_REF_FILES"
+
+COPY_REFFILES_TO_BLDREFDIR()
+{
+ # Copy test files. Use -f to make sure we get a new copy.
+ for tstfile in $COPY_REFFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip cp if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source dir and would make the
+ # cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDREFDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDREFDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out the exit below to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_REFFILES_AND_BLDREFDIR()
+{
+ # Skip rm if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source dir, in which case the
+ # files were never copied.
+ SDIR=`$DIRNAME $HDFTEST_HOME/h5ex_g_iterate.h5`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDREFDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDREFDIR
+ fi
+}
+
+# Print a one-line message left-justified in a field of 70 characters,
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+JAVAEXEFLAGS=@H5_JAVAFLAGS@
+
+###############################################################################
+# DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+COPY_REFFILES_TO_BLDREFDIR
+
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-1.7.5.jar:"$BLDLIBDIR"/slf4j-simple-1.7.5.jar:"$TESTJARFILE""
+
+TEST=/usr/bin/test
+if [ ! -x /usr/bin/test ]
+then
+TEST=`which test`
+fi
+
+if $TEST -z "$CLASSPATH"; then
+ CLASSPATH=""
+fi
+CLASSPATH=$CPATH":"$CLASSPATH
+export CLASSPATH
+
+if $TEST -n "$JAVAPATH" ; then
+ PATH=$JAVAPATH":"$PATH
+ export PATH
+fi
+
+if $TEST -e /bin/uname; then
+ os_name=`/bin/uname -s`
+elif $TEST -e /usr/bin/uname; then
+ os_name=`/usr/bin/uname -s`
+else
+ os_name=unknown
+fi
+
+if $TEST -z "$LD_LIBRARY_PATH" ; then
+ LD_LIBRARY_PATH=""
+fi
+
+case $os_name in
+ Darwin)
+ DYLD_LIBRARY_PATH=$BLDLIBDIR:$DYLD_LIBRARY_PATH
+ export DYLD_LIBRARY_PATH
+ LD_LIBRARY_PATH=$DYLD_LIBRARY_PATH
+ ;;
+ *)
+ LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
+ ;;
+esac
+
+export LD_LIBRARY_PATH
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Create"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Create > H5Ex_G_Create.out)
+if diff H5Ex_G_Create.out examples.groups.H5Ex_G_Create.txt > /dev/null; then
+ echo " PASSED groups.H5Ex_G_Create"
+else
+ echo "**FAILED** groups.H5Ex_G_Create"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Iterate"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Iterate > H5Ex_G_Iterate.out)
+if diff H5Ex_G_Iterate.out examples.groups.H5Ex_G_Iterate.txt > /dev/null; then
+ echo " PASSED groups.H5Ex_G_Iterate"
+else
+ echo "**FAILED** groups.H5Ex_G_Iterate"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Compact"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Compact > H5Ex_G_Compact.out)
+if diff H5Ex_G_Compact.out examples.groups.H5Ex_G_Compact.txt > /dev/null; then
+ echo " PASSED groups.H5Ex_G_Compact"
+else
+ echo "**FAILED** groups.H5Ex_G_Compact"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Corder"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Corder > H5Ex_G_Corder.out)
+if diff H5Ex_G_Corder.out examples.groups.H5Ex_G_Corder.txt > /dev/null; then
+ echo " PASSED groups.H5Ex_G_Corder"
+else
+ echo "**FAILED** groups.H5Ex_G_Corder"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Intermediate"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Intermediate > H5Ex_G_Intermediate.out)
+if diff H5Ex_G_Intermediate.out examples.groups.H5Ex_G_Intermediate.txt > /dev/null; then
+ echo " PASSED groups.H5Ex_G_Intermediate"
+else
+ echo "**FAILED** groups.H5Ex_G_Intermediate"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Phase"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Phase > H5Ex_G_Phase.out)
+if diff H5Ex_G_Phase.out examples.groups.H5Ex_G_Phase.txt > /dev/null; then
+ echo " PASSED groups.H5Ex_G_Phase"
+else
+ echo "**FAILED** groups.H5Ex_G_Phase"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Visit"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Visit > H5Ex_G_Visit.out)
+if diff H5Ex_G_Visit.out examples.groups.H5Ex_G_Visit.txt > /dev/null; then
+ echo " PASSED groups.H5Ex_G_Visit"
+else
+ echo "**FAILED** groups.H5Ex_G_Visit"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+# Clean up temporary files/directories
+CLEAN_LIBFILES_AND_BLDLIBDIR
+CLEAN_DATAFILES_AND_BLDDIR
+CLEAN_REFFILES_AND_BLDREFDIR
+
+# Report test results and exit
+if test $nerrors -eq 0 ; then
+ echo "All $TESTNAME tests passed."
+ exit $EXIT_SUCCESS
+else
+ echo "$TESTNAME tests failed with $nerrors errors."
+ exit $EXIT_FAILURE
+fi
diff --git a/java/examples/intro/CMakeLists.txt b/java/examples/intro/CMakeLists.txt
new file mode 100644
index 0000000..ae3f585
--- /dev/null
+++ b/java/examples/intro/CMakeLists.txt
@@ -0,0 +1,112 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDFJAVA_EXAMPLES_INTRO Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDF5_JAVA_JNI_BINARY_DIR}
+ ${HDF5_JAVA_HDF5_LIB_DIR}
+)
+
+set (HDF_JAVA_EXAMPLES
+ H5_CreateAttribute
+ H5_CreateDataset
+ H5_CreateFile
+ H5_CreateGroup
+ H5_CreateGroupAbsoluteRelative
+ H5_CreateGroupDataset
+ H5_ReadWrite
+)
+
+if (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+else (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+endif (WIN32)
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS}")
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
+endforeach (CMAKE_INCLUDE_PATH)
+
+foreach (example ${HDF_JAVA_EXAMPLES})
+ file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt
+ "Main-Class: examples.intro.${example}
+"
+ )
+ add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java)
+ get_target_property (${example}_JAR_FILE ${example} JAR_FILE)
+# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
+ get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
+ add_dependencies (${example} ${HDF5_JAVA_HDF5_LIB_TARGET})
+endforeach (example ${HDF_JAVA_EXAMPLES})
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_NOP_JAR}")
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (HDFJAVA_JAR ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${HDFJAVA_JAR}")
+endforeach (HDFJAVA_JAR)
+
+foreach (example ${HDF_JAVA_OBJECT_EXAMPLES})
+ file (WRITE ${PROJECT_BINARY_DIR}/${example}_Manifest.txt
+ "Main-Class: examples.intro.${example}
+"
+ )
+ add_jar (${example} MANIFEST ${PROJECT_BINARY_DIR}/${example}_Manifest.txt ${example}.java)
+ get_target_property (${example}_JAR_FILE ${example} JAR_FILE)
+# install_jar (${example} ${HJAVA_INSTALL_DATA_DIR}/examples examples)
+ get_target_property (${example}_CLASSPATH ${example} CLASSDIR)
+ add_dependencies (${example} ${HDFJAVA_H5_LIB_TARGET})
+endforeach (example ${HDF_JAVA_OBJECT_EXAMPLES})
+
+MACRO (ADD_H5_TEST resultfile resultcode)
+ add_test (
+ NAME JAVA_intro-${resultfile}
+ COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
+ -D "TEST_PROGRAM=examples.intro.${resultfile}"
+ -D "TEST_ARGS:STRING=${ARGN}"
+ -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${resultfile}_JAR_FILE}"
+ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}"
+ -D "TEST_FOLDER=${HDFJAVA_EXAMPLES_BINARY_DIR}"
+ -D "TEST_OUTPUT=intro/${resultfile}.out"
+ -D "TEST_EXPECT=${resultcode}"
+ -D "TEST_REFERENCE=intro/${resultfile}.txt"
+ -P "${HDF_RESOURCES_DIR}/jrunTest.cmake"
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (JAVA_intro-${resultfile} PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ set (last_test "JAVA_intro-${resultfile}")
+ENDMACRO (ADD_H5_TEST file)
+
+if (BUILD_TESTING)
+
+ foreach (example ${HDF_JAVA_EXAMPLES})
+ add_test (
+ NAME JAVA_intro-${example}-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5
+ ${example}.out
+ ${example}.out.err
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (JAVA_intro-${example}-clearall-objects PROPERTIES DEPENDS ${last_test})
+ endif (NOT "${last_test}" STREQUAL "")
+ add_test (
+ NAME JAVA_intro-${example}-copy-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E copy_if_different
+ ${HDFJAVA_EXAMPLES_SOURCE_DIR}/testfiles/examples.intro.${example}.txt
+ ${HDFJAVA_EXAMPLES_INTRO_BINARY_DIR}/${example}.txt
+ )
+ set_tests_properties (JAVA_intro-${example}-copy-objects PROPERTIES DEPENDS JAVA_intro-${example}-clearall-objects)
+ set (last_test "JAVA_intro-${example}-copy-objects")
+ ADD_H5_TEST (${example} 0)
+ endforeach (example ${HDF_JAVA_EXAMPLES})
+
+endif (BUILD_TESTING)
diff --git a/java/examples/intro/H5_CreateAttribute.java b/java/examples/intro/H5_CreateAttribute.java
new file mode 100644
index 0000000..16c53d6
--- /dev/null
+++ b/java/examples/intro/H5_CreateAttribute.java
@@ -0,0 +1,145 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ Creating a dataset attribute.
+ ************************************************************/
+
+package examples.intro;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5_CreateAttribute {
+ private static String FILENAME = "H5_CreateAttribute.h5";
+ private static String DATASETNAME = "dset";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
+ private static String DATASETATTRIBUTE = "Units";
+
+ private static void CreateDatasetAttribute() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long attribute_id = -1;
+ long[] dims1 = { DIM_X, DIM_Y };
+ long[] dims = { 2 };
+ int[] attr_data = { 100, 200 };
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims1, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the attribute.
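+ // The attribute holds a 1-D array of two integers (dims = {2}), matching attr_data.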
+ try {
+ dataspace_id = H5.H5Screate_simple(1, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a dataset attribute.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, DATASETATTRIBUTE, HDF5Constants.H5T_STD_I32BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the attribute data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_INT, attr_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the attribute.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataspace.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5_CreateAttribute.CreateDatasetAttribute();
+ }
+
+}
diff --git a/java/examples/intro/H5_CreateDataset.java b/java/examples/intro/H5_CreateDataset.java
new file mode 100644
index 0000000..a16cfe6
--- /dev/null
+++ b/java/examples/intro/H5_CreateDataset.java
@@ -0,0 +1,97 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ Creating and closing a dataset.
+ ************************************************************/
+
+package examples.intro;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5_CreateDataset {
+ private static String FILENAME = "H5_CreateDataset.h5";
+ private static String DATASETNAME = "dset";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
+
+ private static void CreateDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5_CreateDataset.CreateDataset();
+ }
+
+}
diff --git a/java/examples/intro/H5_CreateFile.java b/java/examples/intro/H5_CreateFile.java
new file mode 100644
index 0000000..eb9f277
--- /dev/null
+++ b/java/examples/intro/H5_CreateFile.java
@@ -0,0 +1,55 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ Creating and closing a file.
+ ************************************************************/
+
+package examples.intro;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5_CreateFile {
+ static final String FILENAME = "H5_CreateFile.h5";
+
+ private static void CreateFile() {
+ long file_id = -1;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5_CreateFile.CreateFile();
+ }
+
+}
diff --git a/java/examples/intro/H5_CreateGroup.java b/java/examples/intro/H5_CreateGroup.java
new file mode 100644
index 0000000..36bd49a
--- /dev/null
+++ b/java/examples/intro/H5_CreateGroup.java
@@ -0,0 +1,76 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ Creating and closing a group.
+ ************************************************************/
+
+package examples.intro;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5_CreateGroup {
+ private static String FILENAME = "H5_CreateGroup.h5";
+ private static String GROUPNAME = "MyGroup";
+
+ private static void CreateGroup() {
+ long file_id = -1;
+ long group_id = -1;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a group in the file.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5_CreateGroup.CreateGroup();
+ }
+
+}
diff --git a/java/examples/intro/H5_CreateGroupAbsoluteRelative.java b/java/examples/intro/H5_CreateGroupAbsoluteRelative.java
new file mode 100644
index 0000000..e0127cc
--- /dev/null
+++ b/java/examples/intro/H5_CreateGroupAbsoluteRelative.java
@@ -0,0 +1,118 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ Creating groups using absolute and relative names.
+ ************************************************************/
+
+package examples.intro;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5_CreateGroupAbsoluteRelative {
+ private static String FILENAME = "H5_CreateGroupAbsoluteRelative.h5";
+ private static String GROUPNAME = "MyGroup";
+ private static String GROUPNAME_A = "GroupA";
+ private static String GROUPNAME_B = "GroupB";
+
+ private static void CreateGroupAbsoluteAndRelative() {
+ long file_id = -1;
+ long group1_id = -1;
+ long group2_id = -1;
+ long group3_id = -1;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a group named "/MyGroup" in the file.
+ try {
+ if (file_id >= 0)
+ group1_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create group "Group_A" in group "MyGroup" using absolute name.
+ try {
+ if (file_id >= 0)
+ group2_id = H5.H5Gcreate(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create group "Group_B" in group "MyGroup" using relative name.
+ try {
+ if (group1_id >= 0)
+ group3_id = H5.H5Gcreate(group1_id, GROUPNAME_B, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Close group3 (GroupB).
+ try {
+ if (group3_id >= 0)
+ H5.H5Gclose(group3_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Close group2 (GroupA).
+ try {
+ if (group2_id >= 0)
+ H5.H5Gclose(group2_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Close group1 (MyGroup).
+ try {
+ if (group1_id >= 0)
+ H5.H5Gclose(group1_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ public static void main(String[] args) {
+ H5_CreateGroupAbsoluteRelative.CreateGroupAbsoluteAndRelative();
+ }
+
+}
diff --git a/java/examples/intro/H5_CreateGroupDataset.java b/java/examples/intro/H5_CreateGroupDataset.java
new file mode 100644
index 0000000..0607bbd
--- /dev/null
+++ b/java/examples/intro/H5_CreateGroupDataset.java
@@ -0,0 +1,207 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ Create two datasets within groups.
+ ************************************************************/
+
+package examples.intro;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5_CreateGroupDataset {
+ private static String FILENAME = "H5_CreateGroupDataset.h5";
+ private static String GROUPNAME = "MyGroup";
+ private static String GROUPNAME_A = "GroupA";
+ private static String DATASETNAME1 = "dset1";
+ private static String DATASETNAME2 = "dset2";
+ private static final int DIM1_X = 3;
+ private static final int DIM1_Y = 3;
+ private static final int DIM2_X = 2;
+ private static final int DIM2_Y = 10;
+
+ private static void h5_crtgrpd() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long group_id = -1;
+ long group1_id = -1;
+ long group2_id = -1;
+ int[][] dset1_data = new int[DIM1_X][DIM1_Y];
+ int[][] dset2_data = new int[DIM2_X][DIM2_Y];
+ long[] dims1 = { DIM1_X, DIM1_Y };
+ long[] dims2 = { DIM2_X, DIM2_Y };
+
+ // Initialize the first dataset.
+ for (int indx = 0; indx < DIM1_X; indx++)
+ for (int jndx = 0; jndx < DIM1_Y; jndx++)
+ dset1_data[indx][jndx] = jndx + 1;
+
+ // Initialize the second dataset.
+ for (int indx = 0; indx < DIM2_X; indx++)
+ for (int jndx = 0; jndx < DIM2_Y; jndx++)
+ dset2_data[indx][jndx] = jndx + 1;
+
+ // Create a file.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ // Create a group named "/MyGroup" in the file.
+ if (file_id >= 0) {
+ group1_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ // Create group "Group_A" in group "MyGroup" using absolute name.
+ if (group1_id >= 0) {
+ group2_id = H5.H5Gcreate(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (group2_id >= 0)
+ H5.H5Gclose(group2_id);
+ }
+ if (group1_id >= 0)
+ H5.H5Gclose(group1_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the first dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims1, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset in group "MyGroup".
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, "/" + GROUPNAME + "/" + DATASETNAME1, HDF5Constants.H5T_STD_I32BE,
+ dataspace_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the first dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset1_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the data space for the first dataset.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = -1;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the first dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = -1;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Open an existing group in the specified file.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gopen(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the second dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims2, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the second dataset in group "Group_A".
+ try {
+ if ((group_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(group_id, DATASETNAME2, HDF5Constants.H5T_STD_I32BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the second dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset2_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the data space for the second dataset.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the second dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5_CreateGroupDataset.h5_crtgrpd();
+ }
+
+}
diff --git a/java/examples/intro/H5_ReadWrite.java b/java/examples/intro/H5_ReadWrite.java
new file mode 100644
index 0000000..0d73884
--- /dev/null
+++ b/java/examples/intro/H5_ReadWrite.java
@@ -0,0 +1,113 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ Writing and reading an existing dataset.
+ ************************************************************/
+
+package examples.intro;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5_ReadWrite {
+ private static String FILENAME = "H5_ReadWrite.h5";
+ private static String DATASETNAME = "dset";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
+
+ private static void ReadWriteDataset() {
+ long file_id = -1;
+ long dataspace_id = -1;
+ long dataset_id = -1;
+ long[] dims = { DIM_X, DIM_Y };
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+                dset_data[indx][jndx] = indx * DIM_Y + jndx + 1;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, "/" + DATASETNAME, HDF5Constants.H5T_STD_I32BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
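+        // Read the dataset back into dset_data.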
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) {
+ H5_ReadWrite.ReadWriteDataset();
+ }
+
+}
diff --git a/java/examples/intro/Makefile.am b/java/examples/intro/Makefile.am
new file mode 100644
index 0000000..3d5757a
--- /dev/null
+++ b/java/examples/intro/Makefile.am
@@ -0,0 +1,67 @@
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+# HDF5 Java Library Examples Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+JAVAROOT = .classes
+
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+pkgpath = examples/intro
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.5.jar:$$CLASSPATH
+
+jarfile = jar$(PACKAGE_TARNAME)intro.jar
+
+AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
+
+TESTPACKAGE =
+
+noinst_JAVA = \
+ H5_CreateAttribute.java \
+ H5_CreateDataset.java \
+ H5_CreateFile.java \
+ H5_CreateGroup.java \
+ H5_CreateGroupAbsoluteRelative.java \
+ H5_CreateGroupDataset.java \
+ H5_ReadWrite.java
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+noinst_DATA = $(jarfile)
+
+.PHONY: classes
+
+check_SCRIPTS = runExample.sh
+TEST_SCRIPT = $(check_SCRIPTS)
+
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class runExample.sh
+
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
+
+include $(top_srcdir)/config/conclude.am
diff --git a/java/examples/intro/runExample.sh.in b/java/examples/intro/runExample.sh.in
new file mode 100644
index 0000000..60ea765
--- /dev/null
+++ b/java/examples/intro/runExample.sh.in
@@ -0,0 +1,290 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+
+top_builddir=@top_builddir@
+top_srcdir=@top_srcdir@
+srcdir=@srcdir@
+
+TESTNAME=EX_Intro
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+
+# Set up default variable values if not supplied by the user.
+RM='rm -rf'
+CMP='cmp'
+DIFF='diff -c'
+CP='cp'
+DIRNAME='dirname'
+LS='ls'
+AWK='awk'
+
+nerrors=0
+
+# where the libs exist
+HDFLIB_HOME="$top_srcdir/java/lib"
+BLDLIBDIR="./lib"
+BLDDIR="."
+HDFTEST_HOME="$top_srcdir/java/examples/intro"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@intro.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# All the library files are copied from the source directory to the test directory.
+# NOTE: Keep this framework when adding/removing test files.
+# This list is also used to check that the files exist.
+# A '#' with no leading space marks a comment line.
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$HDFLIB_HOME/slf4j-api-1.7.5.jar
+$HDFLIB_HOME/ext/slf4j-simple-1.7.5.jar
+$top_builddir/src/.libs/libhdf5.*
+$top_builddir/java/src/jni/.libs/libhdf5_java.*
+$top_builddir/java/src/$JARFILE
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateDataset.txt
+$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateAttribute.txt
+$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateFile.txt
+$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateGroup.txt
+$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt
+$HDFTEST_HOME/../testfiles/examples.intro.H5_CreateGroupDataset.txt
+$HDFTEST_HOME/../testfiles/examples.intro.H5_ReadWrite.txt
+"
+
+#
+# copy files from source dirs to test dir
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+    # Copy the test files. Use -f to make sure we get a fresh copy.
+ for tstfile in $COPY_LIBFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+            # Skip cp if srcdir is the same as destdir.
+            # This occurs when the build/test is performed in the source dir
+            # and would make cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+                    # Comment this out to CREATE the expected file.
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+    # Skip rm if srcdir is the same as destdir.
+    # This occurs when the build/test is performed in the source dir
+    # and removing the directory would delete the source files.
+ SDIR=`$DIRNAME $HDFLIB_HOME/slf4j-api-1.7.5.jar`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDLIBDIR
+ fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+    # Copy the test files. Use -f to make sure we get a fresh copy.
+ for tstfile in $COPY_DATAFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+            # Skip cp if srcdir is the same as destdir.
+            # This occurs when the build/test is performed in the source dir
+            # and would make cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+                    # Comment this out to CREATE the expected file.
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+ $RM $BLDDIR/examples.intro.H5_*.txt
+ $RM $BLDDIR/H5_*.out
+}
+
+# Print a one-line message left justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+JAVAEXEFLAGS=@H5_JAVAFLAGS@
+
+###############################################################################
+# DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/slf4j-api-1.7.5.jar:"$BLDLIBDIR"/slf4j-simple-1.7.5.jar:"$TESTJARFILE""
+
+TEST=/usr/bin/test
+if [ ! -x /usr/bin/test ]
+then
+TEST=`which test`
+fi
+
+if $TEST -z "$CLASSPATH"; then
+ CLASSPATH=""
+fi
+CLASSPATH=$CPATH":"$CLASSPATH
+export CLASSPATH
+
+if $TEST -n "$JAVAPATH" ; then
+ PATH=$JAVAPATH":"$PATH
+ export PATH
+fi
+
+if $TEST -e /bin/uname; then
+ os_name=`/bin/uname -s`
+elif $TEST -e /usr/bin/uname; then
+ os_name=`/usr/bin/uname -s`
+else
+ os_name=unknown
+fi
+
+if $TEST -z "$LD_LIBRARY_PATH" ; then
+ LD_LIBRARY_PATH=""
+fi
+
+case $os_name in
+ Darwin)
+ DYLD_LIBRARY_PATH=$BLDLIBDIR:$DYLD_LIBRARY_PATH
+ export DYLD_LIBRARY_PATH
+ LD_LIBRARY_PATH=$DYLD_LIBRARY_PATH
+ ;;
+ *)
+ LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
+ ;;
+esac
+
+export LD_LIBRARY_PATH
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateDataset"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateDataset > H5_CreateDataset.out)
+if diff H5_CreateDataset.out examples.intro.H5_CreateDataset.txt > /dev/null; then
+ echo " PASSED intro.H5_CreateDataset"
+else
+ echo "**FAILED** intro.H5_CreateDataset"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateAttribute"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateAttribute > H5_CreateAttribute.out)
+if diff H5_CreateAttribute.out examples.intro.H5_CreateAttribute.txt > /dev/null; then
+ echo " PASSED intro.H5_CreateAttribute"
+else
+ echo "**FAILED** intro.H5_CreateAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateFile"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateFile > H5_CreateFile.out)
+if diff H5_CreateFile.out examples.intro.H5_CreateFile.txt > /dev/null; then
+ echo " PASSED intro.H5_CreateFile"
+else
+ echo "**FAILED** intro.H5_CreateFile"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroup"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroup > H5_CreateGroup.out)
+if diff H5_CreateGroup.out examples.intro.H5_CreateGroup.txt > /dev/null; then
+ echo " PASSED intro.H5_CreateGroup"
+else
+ echo "**FAILED** intro.H5_CreateGroup"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroupAbsoluteRelative"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroupAbsoluteRelative > H5_CreateGroupAbsoluteRelative.out)
+if diff H5_CreateGroupAbsoluteRelative.out examples.intro.H5_CreateGroupAbsoluteRelative.txt > /dev/null; then
+ echo " PASSED intro.H5_CreateGroupAbsoluteRelative"
+else
+ echo "**FAILED** intro.H5_CreateGroupAbsoluteRelative"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroupDataset"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_CreateGroupDataset > H5_CreateGroupDataset.out)
+if diff H5_CreateGroupDataset.out examples.intro.H5_CreateGroupDataset.txt > /dev/null; then
+ echo " PASSED intro.H5_CreateGroupDataset"
+else
+ echo "**FAILED** intro.H5_CreateGroupDataset"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_ReadWrite"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.intro.H5_ReadWrite > H5_ReadWrite.out)
+if diff H5_ReadWrite.out examples.intro.H5_ReadWrite.txt > /dev/null; then
+ echo " PASSED intro.H5_ReadWrite"
+else
+ echo "**FAILED** intro.H5_ReadWrite"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+# Clean up temporary files/directories
+CLEAN_LIBFILES_AND_BLDLIBDIR
+CLEAN_DATAFILES_AND_BLDDIR
+
+# Report test results and exit
+if test $nerrors -eq 0 ; then
+ echo "All $TESTNAME tests passed."
+ exit $EXIT_SUCCESS
+else
+ echo "$TESTNAME tests failed with $nerrors errors."
+ exit $EXIT_FAILURE
+fi
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Alloc.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Alloc.txt
new file mode 100644
index 0000000..6fd810b
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Alloc.txt
@@ -0,0 +1,16 @@
+Creating datasets...
+DS1 has allocation time H5D_ALLOC_TIME_LATE
+DS2 has allocation time H5D_ALLOC_TIME_EARLY
+
+Space for DS1 has not been allocated.
+Storage size for DS1 is: 0 bytes.
+Space for DS2 has been allocated.
+Storage size for DS2 is: 112 bytes.
+
+Writing data...
+
+Space for DS1 has been allocated.
+Storage size for DS1 is: 112 bytes.
+Space for DS2 has been allocated.
+Storage size for DS2 is: 112 bytes.
+
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Checksum.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Checksum.txt
new file mode 100644
index 0000000..676aebb
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Checksum.txt
@@ -0,0 +1,3 @@
+Filter type is: H5Z_FILTER_FLETCHER32
+
+Maximum value in DS1 is: 1890
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Chunk.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Chunk.txt
new file mode 100644
index 0000000..5f4c2de
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Chunk.txt
@@ -0,0 +1,26 @@
+Original Data:
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+
+Storage layout for DS1 is: H5D_CHUNKED
+
+Data as written to disk by hyberslabs:
+ [ 0 1 0 0 1 0 0 1 ]
+ [ 1 1 0 1 1 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+ [ 0 1 0 0 1 0 0 1 ]
+ [ 1 1 0 1 1 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+
+Data as read from disk by hyberslab:
+ [ 0 1 0 0 0 0 0 1 ]
+ [ 0 1 0 1 0 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+ [ 0 0 0 0 0 0 0 0 ]
+ [ 0 1 0 1 0 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Compact.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Compact.txt
new file mode 100644
index 0000000..e34f3c1
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Compact.txt
@@ -0,0 +1,8 @@
+Storage layout for DS1 is: H5D_COMPACT
+
+Data for DS1 is:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_External.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_External.txt
new file mode 100644
index 0000000..5878149
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_External.txt
@@ -0,0 +1,7 @@
+DS1 is stored in file: H5Ex_D_External.data
+DS1:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_FillValue.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_FillValue.txt
new file mode 100644
index 0000000..68d826b
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_FillValue.txt
@@ -0,0 +1,20 @@
+Dataset before being written to:
+ [ 99 99 99 99 99 99 99 ]
+ [ 99 99 99 99 99 99 99 ]
+ [ 99 99 99 99 99 99 99 ]
+ [ 99 99 99 99 99 99 99 ]
+
+Dataset after being written to:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+
+Dataset after extension:
+ [ 0 -1 -2 -3 -4 -5 -6 99 99 99 ]
+ [ 0 0 0 0 0 0 0 99 99 99 ]
+ [ 0 1 2 3 4 5 6 99 99 99 ]
+ [ 0 2 4 6 8 10 12 99 99 99 ]
+ [ 99 99 99 99 99 99 99 99 99 99 ]
+ [ 99 99 99 99 99 99 99 99 99 99 ]
+
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Gzip.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Gzip.txt
new file mode 100644
index 0000000..255a561
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Gzip.txt
@@ -0,0 +1,3 @@
+Filter type is: H5Z_FILTER_DEFLATE
+
+Maximum value in DS1 is: 1890
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Hyperslab.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Hyperslab.txt
new file mode 100644
index 0000000..823dfcc
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Hyperslab.txt
@@ -0,0 +1,24 @@
+Original Data:
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+
+Data as written to disk by hyberslabs:
+ [ 0 1 0 0 1 0 0 1 ]
+ [ 1 1 0 1 1 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+ [ 0 1 0 0 1 0 0 1 ]
+ [ 1 1 0 1 1 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+
+Data as read from disk by hyberslab:
+ [ 0 1 0 0 0 0 0 1 ]
+ [ 0 1 0 1 0 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+ [ 0 0 0 0 0 0 0 0 ]
+ [ 0 1 0 1 0 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Nbit.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Nbit.txt
new file mode 100644
index 0000000..a768ba0
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Nbit.txt
@@ -0,0 +1,3 @@
+Filter type is: H5Z_FILTER_NBIT
+
+Maximum value in DS1 is: 1890
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_ReadWrite.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_ReadWrite.txt
new file mode 100644
index 0000000..e021029
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_ReadWrite.txt
@@ -0,0 +1,6 @@
+DS1:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Shuffle.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Shuffle.txt
new file mode 100644
index 0000000..ea95f11
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Shuffle.txt
@@ -0,0 +1,5 @@
+Filter 0: Type is: H5Z_FILTER_SHUFFLE
+
+Filter 1: Type is: H5Z_FILTER_DEFLATE
+
+Maximum value in DS1 is: 1890
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Sofloat.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Sofloat.txt
new file mode 100644
index 0000000..4d4b5d6
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Sofloat.txt
@@ -0,0 +1,6 @@
+Maximum value in write buffer is: 106.66666666666667
+Minimum value in write buffer is: 1.7692307692307692
+Filter type is: H5Z_FILTER_SCALEOFFSET
+
+Maximum value in DS1 is: 106.66169811320755
+Minimum value in DS1 is: 1.7692307692307692
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Soint.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Soint.txt
new file mode 100644
index 0000000..48d0d8c
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Soint.txt
@@ -0,0 +1,3 @@
+Filter type is: H5Z_FILTER_SCALEOFFSET
+
+Maximum value in DS1 is: 1890
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Szip.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Szip.txt
new file mode 100644
index 0000000..a1c0d19
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Szip.txt
@@ -0,0 +1,3 @@
+Filter type is: H5Z_FILTER_SZIP
+
+Maximum value in DS1 is: 1890
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_Transform.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_Transform.txt
new file mode 100644
index 0000000..05257bc
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_Transform.txt
@@ -0,0 +1,15 @@
+Original Data:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+Data as written with transform 'x+1'
+ [ 1 0 -1 -2 -3 -4 -5 ]
+ [ 1 1 1 1 1 1 1 ]
+ [ 1 2 3 4 5 6 7 ]
+ [ 1 3 5 7 9 11 13 ]
+Data as written with transform 'x+1' and read with transform 'x-1'
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt
new file mode 100644
index 0000000..d3a7281
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt
@@ -0,0 +1,14 @@
+Dataset before extension:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+
+Dataset after extension:
+ [ 0 -1 -2 -3 -4 -5 -6 7 8 9 ]
+ [ 0 0 0 0 0 0 0 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 2 4 6 8 10 12 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt
new file mode 100644
index 0000000..9e36281
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt
@@ -0,0 +1,16 @@
+Dataset before extension:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+
+Filter type is: H5Z_FILTER_DEFLATE
+
+Dataset after extension:
+ [ 0 -1 -2 -3 -4 -5 -6 7 8 9 ]
+ [ 0 0 0 0 0 0 0 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 2 4 6 8 10 12 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+
diff --git a/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt
new file mode 100644
index 0000000..15eee16
--- /dev/null
+++ b/java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt
@@ -0,0 +1,14 @@
+Dataset before extension:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+
+Dataset after extension:
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Array.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Array.txt
new file mode 100644
index 0000000..7bcd8fa
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_Array.txt
@@ -0,0 +1,21 @@
+DS1 [0]:
+ [0 0 0 0 0 ]
+ [0 -1 -2 -3 -4 ]
+ [0 -2 -4 -6 -8 ]
+
+DS1 [1]:
+ [0 1 2 3 4 ]
+ [1 1 1 1 1 ]
+ [2 1 0 -1 -2 ]
+
+DS1 [2]:
+ [0 2 4 6 8 ]
+ [2 3 4 5 6 ]
+ [4 4 4 4 4 ]
+
+DS1 [3]:
+ [0 3 6 9 12 ]
+ [3 5 7 9 11 ]
+ [6 7 8 9 10 ]
+
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt
new file mode 100644
index 0000000..7d27c0b
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt
@@ -0,0 +1,21 @@
+A1 [0]:
+ [0 0 0 0 0 ]
+ [0 -1 -2 -3 -4 ]
+ [0 -2 -4 -6 -8 ]
+
+A1 [1]:
+ [0 1 2 3 4 ]
+ [1 1 1 1 1 ]
+ [2 1 0 -1 -2 ]
+
+A1 [2]:
+ [0 2 4 6 8 ]
+ [2 3 4 5 6 ]
+ [4 4 4 4 4 ]
+
+A1 [3]:
+ [0 3 6 9 12 ]
+ [3 5 7 9 11 ]
+ [6 7 8 9 10 ]
+
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Bit.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Bit.txt
new file mode 100644
index 0000000..57769b2
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_Bit.txt
@@ -0,0 +1,6 @@
+DS1:
+ [{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}{1, 0, 3, 3}{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}]
+ [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
+ [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
+ [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_BitAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_BitAttribute.txt
new file mode 100644
index 0000000..683bc7f
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_BitAttribute.txt
@@ -0,0 +1,6 @@
+A1:
+ [{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}{1, 0, 3, 3}{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}]
+ [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
+ [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
+ [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Commit.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Commit.txt
new file mode 100644
index 0000000..e6d0bef
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_Commit.txt
@@ -0,0 +1,6 @@
+Named datatype: Sensor_Type:
+ Class: H5T_COMPOUND
+ Serial number
+ Location
+ Temperature (F)
+ Pressure (inHg)
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Compound.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Compound.txt
new file mode 100644
index 0000000..0505c78
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_Compound.txt
@@ -0,0 +1,25 @@
+DS1 [0]:
+Serial number : 1153
+Location : Exterior (static)
+Temperature (F) : 53.23
+Pressure (inHg) : 24.57
+
+DS1 [1]:
+Serial number : 1184
+Location : Intake
+Temperature (F) : 55.12
+Pressure (inHg) : 22.95
+
+DS1 [2]:
+Serial number : 1027
+Location : Intake manifold
+Temperature (F) : 103.55
+Pressure (inHg) : 31.23
+
+DS1 [3]:
+Serial number : 1313
+Location : Exhaust manifold
+Temperature (F) : 1252.89
+Pressure (inHg) : 84.11
+
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt
new file mode 100644
index 0000000..dd77f8d
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt
@@ -0,0 +1,25 @@
+A1 [0]:
+Serial number : 1153
+Location : Exterior (static)
+Temperature (F) : 53.23
+Pressure (inHg) : 24.57
+
+A1 [1]:
+Serial number : 1184
+Location : Intake
+Temperature (F) : 55.12
+Pressure (inHg) : 22.95
+
+A1 [2]:
+Serial number : 1027
+Location : Intake manifold
+Temperature (F) : 103.55
+Pressure (inHg) : 31.23
+
+A1 [3]:
+Serial number : 1313
+Location : Exhaust manifold
+Temperature (F) : 1252.89
+Pressure (inHg) : 84.11
+
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Float.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Float.txt
new file mode 100644
index 0000000..85d8ced
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_Float.txt
@@ -0,0 +1,6 @@
+DS1:
+ [ 0.0000 1.0000 2.0000 3.0000 4.0000 5.0000 6.0000]
+ [ 2.0000 1.6667 2.4000 3.2857 4.2222 5.1818 6.1538]
+ [ 4.0000 2.3333 2.8000 3.5714 4.4444 5.3636 6.3077]
+ [ 6.0000 3.0000 3.2000 3.8571 4.6667 5.5455 6.4615]
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt
new file mode 100644
index 0000000..cfa1f92
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt
@@ -0,0 +1,6 @@
+A1:
+ [ 0.0000 1.0000 2.0000 3.0000 4.0000 5.0000 6.0000]
+ [ 2.0000 1.6667 2.4000 3.2857 4.2222 5.1818 6.1538]
+ [ 4.0000 2.3333 2.8000 3.5714 4.4444 5.3636 6.3077]
+ [ 6.0000 3.0000 3.2000 3.8571 4.6667 5.5455 6.4615]
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Integer.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Integer.txt
new file mode 100644
index 0000000..f686bd1
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_Integer.txt
@@ -0,0 +1,6 @@
+DS1:
+ [ 0 -1 -2 -3 -4 -5 -6]
+ [ 0 0 0 0 0 0 0]
+ [ 0 1 2 3 4 5 6]
+ [ 0 2 4 6 8 10 12]
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt
new file mode 100644
index 0000000..dccd4a6
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt
@@ -0,0 +1,6 @@
+A1:
+ [ 0 -1 -2 -3 -4 -5 -6]
+ [ 0 0 0 0 0 0 0]
+ [ 0 1 2 3 4 5 6]
+ [ 0 2 4 6 8 10 12]
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReference.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReference.txt
new file mode 100644
index 0000000..827c042
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReference.txt
@@ -0,0 +1,4 @@
+DS1[0]:
+ ->H5G_GROUP:
+DS1[1]:
+ ->H5G_DATASET:
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt
new file mode 100644
index 0000000..fe3cdc0
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt
@@ -0,0 +1,4 @@
+A1[0]:
+ ->H5G_GROUP:
+A1[1]:
+ ->H5G_DATASET:
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_Opaque.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_Opaque.txt
new file mode 100644
index 0000000..fb74236
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_Opaque.txt
@@ -0,0 +1,6 @@
+Datatype tag for DS1 is: "Character array"
+DS1[0]: OPAQUE0
+DS1[1]: OPAQUE1
+DS1[2]: OPAQUE2
+DS1[3]: OPAQUE3
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt
new file mode 100644
index 0000000..bc9a730
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt
@@ -0,0 +1,6 @@
+Datatype tag for A1 is: "Character array"
+A1[0]: OPAQUE0
+A1[1]: OPAQUE1
+A1[2]: OPAQUE2
+A1[3]: OPAQUE3
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_String.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_String.txt
new file mode 100644
index 0000000..4df6a41
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_String.txt
@@ -0,0 +1,5 @@
+DS1 [0]: Parting
+DS1 [1]: is such
+DS1 [2]: sweet
+DS1 [3]: sorrow.
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_StringAttribute.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_StringAttribute.txt
new file mode 100644
index 0000000..4df6a41
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_StringAttribute.txt
@@ -0,0 +1,5 @@
+DS1 [0]: Parting
+DS1 [1]: is such
+DS1 [2]: sweet
+DS1 [3]: sorrow.
+
diff --git a/java/examples/testfiles/examples.datatypes.H5Ex_T_VLString.txt b/java/examples/testfiles/examples.datatypes.H5Ex_T_VLString.txt
new file mode 100644
index 0000000..0322953
--- /dev/null
+++ b/java/examples/testfiles/examples.datatypes.H5Ex_T_VLString.txt
@@ -0,0 +1,4 @@
+DS1 [0]: Parting
+DS1 [1]: is such
+DS1 [2]: sweet
+DS1 [3]: sorrow.
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Compact.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Compact.txt
new file mode 100644
index 0000000..0a88d3f
--- /dev/null
+++ b/java/examples/testfiles/examples.groups.H5Ex_G_Compact.txt
@@ -0,0 +1,5 @@
+Group storage type for H5Ex_G_Compact1.h5 is: H5G_STORAGE_TYPE_SYMBOL_TABLE
+File size for H5Ex_G_Compact1.h5 is: 1832 bytes
+
+Group storage type for H5Ex_G_Compact2.h5 is: H5G_STORAGE_TYPE_COMPACT
+File size for H5Ex_G_Compact2.h5 is: 342 bytes
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Corder.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Corder.txt
new file mode 100644
index 0000000..2d959fc
--- /dev/null
+++ b/java/examples/testfiles/examples.groups.H5Ex_G_Corder.txt
@@ -0,0 +1,10 @@
+Traversing group using alphabetical indices:
+Index 0: 5
+Index 1: D
+Index 2: F
+Index 3: H
+Traversing group using creation order indices:
+Index 0: H
+Index 1: D
+Index 2: F
+Index 3: 5
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Create.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Create.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/java/examples/testfiles/examples.groups.H5Ex_G_Create.txt
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Intermediate.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Intermediate.txt
new file mode 100644
index 0000000..65a0fc2
--- /dev/null
+++ b/java/examples/testfiles/examples.groups.H5Ex_G_Intermediate.txt
@@ -0,0 +1,5 @@
+Objects in the file_id:
+/ (Group)
+/G1 (Group)
+/G1/G2 (Group)
+/G1/G2/G3 (Group)
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Iterate.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Iterate.txt
new file mode 100644
index 0000000..66a4ae9
--- /dev/null
+++ b/java/examples/testfiles/examples.groups.H5Ex_G_Iterate.txt
@@ -0,0 +1,5 @@
+Objects in root group:
+ Dataset: DS1
+ Datatype: DT1
+ Group: G1
+ Dataset: L1
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Phase.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Phase.txt
new file mode 100644
index 0000000..9e666d4
--- /dev/null
+++ b/java/examples/testfiles/examples.groups.H5Ex_G_Phase.txt
@@ -0,0 +1,15 @@
+1 Group : Storage type is H5G_STORAGE_TYPE_COMPACT
+2 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
+3 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
+4 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
+5 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
+6 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
+7 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
+
+6 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
+5 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
+4 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
+3 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
+2 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
+1 Group : Storage type is H5G_STORAGE_TYPE_COMPACT
+0 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
diff --git a/java/examples/testfiles/examples.groups.H5Ex_G_Visit.txt b/java/examples/testfiles/examples.groups.H5Ex_G_Visit.txt
new file mode 100644
index 0000000..126a588
--- /dev/null
+++ b/java/examples/testfiles/examples.groups.H5Ex_G_Visit.txt
@@ -0,0 +1,19 @@
+Objects in the file:
+/ (Group)
+/group1 (Group)
+/group1/dset1 (Dataset)
+/group1/group3 (Group)
+/group1/group3/group4 (Group)
+/group1/group3/group4/group1 (Group)
+/group1/group3/group4/group2 (Group)
+
+Links in the file:
+/group1 (Group)
+/group1/dset1 (Dataset)
+/group1/group3 (Group)
+/group1/group3/dset2 (Dataset)
+/group1/group3/group4 (Group)
+/group1/group3/group4/group1 (Group)
+/group1/group3/group4/group1/group5 (Group)
+/group1/group3/group4/group2 (Group)
+/group2 (Group)
diff --git a/java/examples/testfiles/examples.intro.H5_CreateAttribute.txt b/java/examples/testfiles/examples.intro.H5_CreateAttribute.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/java/examples/testfiles/examples.intro.H5_CreateAttribute.txt
diff --git a/java/examples/testfiles/examples.intro.H5_CreateDataset.txt b/java/examples/testfiles/examples.intro.H5_CreateDataset.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/java/examples/testfiles/examples.intro.H5_CreateDataset.txt
diff --git a/java/examples/testfiles/examples.intro.H5_CreateFile.txt b/java/examples/testfiles/examples.intro.H5_CreateFile.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/java/examples/testfiles/examples.intro.H5_CreateFile.txt
diff --git a/java/examples/testfiles/examples.intro.H5_CreateGroup.txt b/java/examples/testfiles/examples.intro.H5_CreateGroup.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/java/examples/testfiles/examples.intro.H5_CreateGroup.txt
diff --git a/java/examples/testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt b/java/examples/testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/java/examples/testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt
diff --git a/java/examples/testfiles/examples.intro.H5_CreateGroupDataset.txt b/java/examples/testfiles/examples.intro.H5_CreateGroupDataset.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/java/examples/testfiles/examples.intro.H5_CreateGroupDataset.txt
diff --git a/java/examples/testfiles/examples.intro.H5_ReadWrite.txt b/java/examples/testfiles/examples.intro.H5_ReadWrite.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/java/examples/testfiles/examples.intro.H5_ReadWrite.txt
diff --git a/java/lib/ext/slf4j-nop-1.7.5.jar b/java/lib/ext/slf4j-nop-1.7.5.jar
new file mode 100644
index 0000000..e55bdd8
--- /dev/null
+++ b/java/lib/ext/slf4j-nop-1.7.5.jar
Binary files differ
diff --git a/java/lib/ext/slf4j-simple-1.7.5.jar b/java/lib/ext/slf4j-simple-1.7.5.jar
new file mode 100644
index 0000000..9dece31
--- /dev/null
+++ b/java/lib/ext/slf4j-simple-1.7.5.jar
Binary files differ
diff --git a/java/lib/hamcrest-core.jar b/java/lib/hamcrest-core.jar
new file mode 100644
index 0000000..9d5fe16
--- /dev/null
+++ b/java/lib/hamcrest-core.jar
Binary files differ
diff --git a/java/lib/junit.jar b/java/lib/junit.jar
new file mode 100644
index 0000000..aaf7444
--- /dev/null
+++ b/java/lib/junit.jar
Binary files differ
diff --git a/java/lib/simplelogger.properties b/java/lib/simplelogger.properties
new file mode 100644
index 0000000..119ee92
--- /dev/null
+++ b/java/lib/simplelogger.properties
@@ -0,0 +1,36 @@
+# SLF4J's SimpleLogger configuration file
+# Simple implementation of Logger that sends all enabled log messages, for all defined loggers, to System.err.
+
+org.slf4j.simpleLogger.logFile=slf4j.simpleLogger.log
+
+# Default logging detail level for all instances of SimpleLogger.
+# Must be one of ("trace", "debug", "info", "warn", or "error").
+# If not specified, defaults to "info".
+org.slf4j.simpleLogger.defaultLogLevel=trace
+
+# Logging detail level for a SimpleLogger instance named "xxxxx".
+# Must be one of ("trace", "debug", "info", "warn", or "error").
+# If not specified, the default logging detail level is used.
+#org.slf4j.simpleLogger.log.xxxxx=
+
+# Set to true if you want the current date and time to be included in output messages.
+# Default is false, and will output the number of milliseconds elapsed since startup.
+#org.slf4j.simpleLogger.showDateTime=false
+
+# The date and time format to be used in the output messages.
+# The pattern describing the date and time format is the same that is used in java.text.SimpleDateFormat.
+# If the format is not specified or is invalid, the default format is used.
+# The default format is yyyy-MM-dd HH:mm:ss:SSS Z.
+org.slf4j.simpleLogger.dateTimeFormat=yyyy-MM-dd HH:mm:ss:SSS Z
+
+# Set to true if you want to output the current thread name.
+# Defaults to true.
+org.slf4j.simpleLogger.showThreadName=true
+
+# Set to true if you want the Logger instance name to be included in output messages.
+# Defaults to true.
+org.slf4j.simpleLogger.showLogName=true
+
+# Set to true if you want the last component of the name to be included in output messages.
+# Defaults to false.
+org.slf4j.simpleLogger.showShortLogName=false
diff --git a/java/lib/slf4j-api-1.7.5.jar b/java/lib/slf4j-api-1.7.5.jar
new file mode 100644
index 0000000..8766455
--- /dev/null
+++ b/java/lib/slf4j-api-1.7.5.jar
Binary files differ
diff --git a/java/src/CMakeLists.txt b/java/src/CMakeLists.txt
new file mode 100644
index 0000000..ae78201
--- /dev/null
+++ b/java/src/CMakeLists.txt
@@ -0,0 +1,8 @@
+cmake_minimum_required(VERSION 3.1.0)
+PROJECT ( HDF5_JAVA_SRC C Java )
+
+#-----------------------------------------------------------------------------
+# Traverse source subdirectory
+#-----------------------------------------------------------------------------
+add_subdirectory (${HDF5_JAVA_SRC_SOURCE_DIR}/jni ${HDF5_JAVA_SRC_BINARY_DIR}/jni)
+add_subdirectory (${HDF5_JAVA_SRC_SOURCE_DIR}/hdf ${HDF5_JAVA_SRC_BINARY_DIR}/hdf)
diff --git a/java/src/Makefile.am b/java/src/Makefile.am
new file mode 100644
index 0000000..f2e12a7
--- /dev/null
+++ b/java/src/Makefile.am
@@ -0,0 +1,137 @@
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+#
+# This makefile mostly just reinvokes make in the various subdirectories
+# but does so in the correct order. You can alternatively invoke make from
+# each subdirectory manually.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+# HDF5 Java native interface (JNI) Library Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+SUBDIRS=jni
+
+JAVAROOT = .classes
+
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+jarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+hdf5_javadir = $(libdir)
+
+pkgpath = hdf/hdf5lib
+CLASSPATH_ENV=CLASSPATH=.:$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$$CLASSPATH
+
+AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
+
+hdf5_java_JAVA = \
+ ${pkgpath}/callbacks/Callbacks.java \
+ ${pkgpath}/callbacks/H5A_iterate_cb.java \
+ ${pkgpath}/callbacks/H5A_iterate_t.java \
+ ${pkgpath}/callbacks/H5D_iterate_cb.java \
+ ${pkgpath}/callbacks/H5D_iterate_t.java \
+ ${pkgpath}/callbacks/H5E_walk_cb.java \
+ ${pkgpath}/callbacks/H5E_walk_t.java \
+ ${pkgpath}/callbacks/H5L_iterate_cb.java \
+ ${pkgpath}/callbacks/H5L_iterate_t.java \
+ ${pkgpath}/callbacks/H5O_iterate_cb.java \
+ ${pkgpath}/callbacks/H5O_iterate_t.java \
+ ${pkgpath}/callbacks/H5P_cls_close_func_cb.java \
+ ${pkgpath}/callbacks/H5P_cls_close_func_t.java \
+ ${pkgpath}/callbacks/H5P_cls_copy_func_cb.java \
+ ${pkgpath}/callbacks/H5P_cls_copy_func_t.java \
+ ${pkgpath}/callbacks/H5P_cls_create_func_cb.java \
+ ${pkgpath}/callbacks/H5P_cls_create_func_t.java \
+ ${pkgpath}/callbacks/H5P_prp_close_func_cb.java \
+ ${pkgpath}/callbacks/H5P_prp_compare_func_cb.java \
+ ${pkgpath}/callbacks/H5P_prp_copy_func_cb.java \
+ ${pkgpath}/callbacks/H5P_prp_create_func_cb.java \
+ ${pkgpath}/callbacks/H5P_prp_delete_func_cb.java \
+ ${pkgpath}/callbacks/H5P_prp_get_func_cb.java \
+ ${pkgpath}/callbacks/H5P_prp_set_func_cb.java \
+ ${pkgpath}/callbacks/H5P_iterate_cb.java \
+ ${pkgpath}/callbacks/H5P_iterate_t.java \
+ ${pkgpath}/exceptions/HDF5AtomException.java \
+ ${pkgpath}/exceptions/HDF5AttributeException.java \
+ ${pkgpath}/exceptions/HDF5BtreeException.java \
+ ${pkgpath}/exceptions/HDF5DataFiltersException.java \
+ ${pkgpath}/exceptions/HDF5DataStorageException.java \
+ ${pkgpath}/exceptions/HDF5DatasetInterfaceException.java \
+ ${pkgpath}/exceptions/HDF5DataspaceInterfaceException.java \
+ ${pkgpath}/exceptions/HDF5DatatypeInterfaceException.java \
+ ${pkgpath}/exceptions/HDF5Exception.java \
+ ${pkgpath}/exceptions/HDF5ExternalFileListException.java \
+ ${pkgpath}/exceptions/HDF5FileInterfaceException.java \
+ ${pkgpath}/exceptions/HDF5FunctionArgumentException.java \
+ ${pkgpath}/exceptions/HDF5FunctionEntryExitException.java \
+ ${pkgpath}/exceptions/HDF5HeapException.java \
+ ${pkgpath}/exceptions/HDF5InternalErrorException.java \
+ ${pkgpath}/exceptions/HDF5JavaException.java \
+ ${pkgpath}/exceptions/HDF5LibraryException.java \
+ ${pkgpath}/exceptions/HDF5LowLevelIOException.java \
+ ${pkgpath}/exceptions/HDF5MetaDataCacheException.java \
+ ${pkgpath}/exceptions/HDF5ObjectHeaderException.java \
+ ${pkgpath}/exceptions/HDF5PropertyListInterfaceException.java \
+ ${pkgpath}/exceptions/HDF5ReferenceException.java \
+ ${pkgpath}/exceptions/HDF5ResourceUnavailableException.java \
+ ${pkgpath}/exceptions/HDF5SymbolTableException.java \
+ ${pkgpath}/structs/H5_ih_info_t.java \
+ ${pkgpath}/structs/H5A_info_t.java \
+ ${pkgpath}/structs/H5E_error2_t.java \
+ ${pkgpath}/structs/H5F_info2_t.java \
+ ${pkgpath}/structs/H5G_info_t.java \
+ ${pkgpath}/structs/H5L_info_t.java \
+ ${pkgpath}/structs/H5O_info_t.java \
+ ${pkgpath}/structs/H5O_hdr_info_t.java \
+ ${pkgpath}/structs/H5AC_cache_config_t.java \
+ ${pkgpath}/H5.java \
+ ${pkgpath}/HDF5Constants.java \
+ ${pkgpath}/HDF5GroupInfo.java \
+ ${pkgpath}/HDFArray.java \
+ ${pkgpath}/HDFNativeData.java
+
+$(jarfile): classhdf5_java.stamp classes docs
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+hdf5_java_DATA = $(jarfile)
+
+.PHONY: docs classes
+
+WINDOWTITLE = 'HDF5 Java'
+DOCTITLE = '<h1>HDF5 Java Wrapper</h1>'
+SRCDIR = '$(pkgpath)'
+
+docs:
+ $(JAVADOC) -sourcepath $(srcdir) -d javadoc -use -splitIndex -windowtitle $(WINDOWTITLE) -doctitle $(DOCTITLE) -J-Xmx180m -verbose -overview $(top_srcdir)/java/src/hdf/overview.html -classpath $(CLASSPATH_ENV) hdf.hdf5lib
+
+CLEANFILES = classhdf5_java.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/callbacks/*.class $(JAVAROOT)/$(pkgpath)/exceptions/*.class $(JAVAROOT)/$(pkgpath)/structs/*.class $(JAVAROOT)/$(pkgpath)/*.class
+
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classhdf5_java.stamp
+
+
+# Clean examples when check-clean is invoked
+check-clean :: ;
+
+#include $(top_srcdir)/config/conclude.am
diff --git a/java/src/hdf/CMakeLists.txt b/java/src/hdf/CMakeLists.txt
new file mode 100644
index 0000000..f465da9
--- /dev/null
+++ b/java/src/hdf/CMakeLists.txt
@@ -0,0 +1,4 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDF5_JAVA_HDF)
+
+add_subdirectory (${HDF5_JAVA_HDF_SOURCE_DIR}/hdf5lib hdf5lib)
diff --git a/java/src/hdf/hdf5lib/CMakeLists.txt b/java/src/hdf/hdf5lib/CMakeLists.txt
new file mode 100644
index 0000000..c2daff4
--- /dev/null
+++ b/java/src/hdf/hdf5lib/CMakeLists.txt
@@ -0,0 +1,125 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDF5_JAVA_HDF_HDF5 Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDF5_JAVA_HDF_HDF5_SOURCE_DIR}
+ ${HDF5_JAVA_HDF_HDF5_BINARY_DIR}
+ ${HDF5_JAVA_LIB_DIR}
+ ${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/${HDF5_JAVA_HDF5_LIB_CORENAME}.dir/hdf/hdf5lib
+)
+
+SET_GLOBAL_VARIABLE (HDF5_JAVA_SOURCE_PACKAGES
+ "${HDFJAVA_SOURCE_PACKAGES};hdf.hdf5lib.callbacks;hdf.hdf5lib.exceptions;hdf.hdf5lib.structs;hdf.hdf5lib"
+)
+
+set (HDF5_JAVA_HDF_HDF5_CALLBACKS_SRCS
+ callbacks/H5A_iterate_cb.java
+ callbacks/H5A_iterate_t.java
+ callbacks/H5D_iterate_cb.java
+ callbacks/H5D_iterate_t.java
+ callbacks/H5E_walk_cb.java
+ callbacks/H5E_walk_t.java
+ callbacks/H5L_iterate_cb.java
+ callbacks/H5L_iterate_t.java
+ callbacks/H5O_iterate_cb.java
+ callbacks/H5O_iterate_t.java
+ callbacks/H5P_cls_close_func_cb.java
+ callbacks/H5P_cls_close_func_t.java
+ callbacks/H5P_cls_copy_func_cb.java
+ callbacks/H5P_cls_copy_func_t.java
+ callbacks/H5P_cls_create_func_cb.java
+ callbacks/H5P_cls_create_func_t.java
+ callbacks/H5P_prp_close_func_cb.java
+ callbacks/H5P_prp_compare_func_cb.java
+ callbacks/H5P_prp_copy_func_cb.java
+ callbacks/H5P_prp_create_func_cb.java
+ callbacks/H5P_prp_delete_func_cb.java
+ callbacks/H5P_prp_get_func_cb.java
+ callbacks/H5P_prp_set_func_cb.java
+ callbacks/H5P_iterate_cb.java
+ callbacks/H5P_iterate_t.java
+ callbacks/Callbacks.java
+)
+
+set (HDF5_JAVA_HDF_HDF5_EXCEPTIONS_SRCS
+ exceptions/HDF5Exception.java
+ exceptions/HDF5AtomException.java
+ exceptions/HDF5AttributeException.java
+ exceptions/HDF5BtreeException.java
+ exceptions/HDF5DataFiltersException.java
+ exceptions/HDF5DatasetInterfaceException.java
+ exceptions/HDF5DataspaceInterfaceException.java
+ exceptions/HDF5DataStorageException.java
+ exceptions/HDF5DatatypeInterfaceException.java
+ exceptions/HDF5ExternalFileListException.java
+ exceptions/HDF5FileInterfaceException.java
+ exceptions/HDF5FunctionArgumentException.java
+ exceptions/HDF5FunctionEntryExitException.java
+ exceptions/HDF5HeapException.java
+ exceptions/HDF5InternalErrorException.java
+ exceptions/HDF5JavaException.java
+ exceptions/HDF5LibraryException.java
+ exceptions/HDF5LowLevelIOException.java
+ exceptions/HDF5MetaDataCacheException.java
+ exceptions/HDF5ObjectHeaderException.java
+ exceptions/HDF5PropertyListInterfaceException.java
+ exceptions/HDF5ReferenceException.java
+ exceptions/HDF5ResourceUnavailableException.java
+ exceptions/HDF5SymbolTableException.java
+)
+
+set (HDF5_JAVA_HDF_HDF5_STRUCTS_SRCS
+ structs/H5_ih_info_t.java
+ structs/H5A_info_t.java
+ structs/H5AC_cache_config_t.java
+ structs/H5E_error2_t.java
+ structs/H5F_info2_t.java
+ structs/H5G_info_t.java
+ structs/H5L_info_t.java
+ structs/H5O_hdr_info_t.java
+ structs/H5O_info_t.java
+)
+
+set (HDF5_JAVA_HDF_HDF5_SRCS
+ HDFArray.java
+ HDF5Constants.java
+ HDF5GroupInfo.java
+ HDFNativeData.java
+ H5.java
+)
+
+set (CMAKE_JNI_TARGET TRUE)
+
+file (WRITE ${PROJECT_BINARY_DIR}/Manifest.txt
+"
+"
+)
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_LOGGING_JAR}")
+
+add_jar (${HDF5_JAVA_HDF5_LIB_TARGET}
+    OUTPUT_NAME "${HDF5_JAVA_HDF5_LIB_TARGET}-${HDF5_PACKAGE_VERSION}"
+    OUTPUT_DIR ${CMAKE_JAVA_TARGET_OUTPUT_DIR}
+    MANIFEST ${PROJECT_BINARY_DIR}/Manifest.txt
+    ${HDF5_JAVA_HDF_HDF5_CALLBACKS_SRCS} ${HDF5_JAVA_HDF_HDF5_EXCEPTIONS_SRCS} ${HDF5_JAVA_HDF_HDF5_STRUCTS_SRCS} ${HDF5_JAVA_HDF_HDF5_SRCS}
+)
+install_jar (${HDF5_JAVA_HDF5_LIB_TARGET} LIBRARY DESTINATION ${HDF5_INSTALL_JAR_DIR} COMPONENT libraries)
+#if (NOT WIN32)
+# install_jni_symlink (${HDF5_JAVA_HDF5_LIB_TARGET} ${HDF5_INSTALL_JAR_DIR} libraries)
+#endif (NOT WIN32)
+
+get_target_property (${HDF5_JAVA_HDF5_LIB_TARGET}_JAR_FILE ${HDF5_JAVA_HDF5_LIB_TARGET} JAR_FILE)
+SET_GLOBAL_VARIABLE (HDF5_JAVA_JARS_TO_EXPORT "${HDF5_JAVA_JARS_TO_EXPORT};${${HDF5_JAVA_HDF5_LIB_TARGET}_JAR_FILE}")
+SET_GLOBAL_VARIABLE (HDF5_JAVA_JARS ${${HDF5_JAVA_HDF5_LIB_TARGET}_JAR_FILE})
+
+add_dependencies (${HDF5_JAVA_HDF5_LIB_TARGET} ${HDF5_JAVA_JNI_LIB_TARGET})
+set_target_properties (${HDF5_JAVA_HDF5_LIB_TARGET} PROPERTIES FOLDER libraries/java)
+
+create_javadoc(hdf5_java_doc
+ FILES ${HDF5_JAVA_HDF_HDF5_CALLBACKS_SRCS} ${HDF5_JAVA_HDF_HDF5_EXCEPTIONS_SRCS} ${HDF5_JAVA_HDF_HDF5_STRUCTS_SRCS} ${HDF5_JAVA_HDF_HDF5_SRCS}
+ OVERVIEW ${HDF5_JAVA_HDF5_SRC_DIR}/overview.html
+ CLASSPATH ${CMAKE_JAVA_INCLUDE_PATH}
+ WINDOWTITLE "HDF5 Java"
+ DOCTITLE "<h1>HDF5 Java Wrapper</h1>"
+ INSTALLPATH ${HDF5_INSTALL_DATA_DIR}
+ AUTHOR TRUE
+ USE TRUE
+ VERSION TRUE
+)
diff --git a/java/src/hdf/hdf5lib/H5.java b/java/src/hdf/hdf5lib/H5.java
new file mode 100644
index 0000000..b8d9147
--- /dev/null
+++ b/java/src/hdf/hdf5lib/H5.java
@@ -0,0 +1,9184 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+
+package hdf.hdf5lib;
+
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.Collection;
+import java.util.LinkedHashSet;
+import hdf.hdf5lib.callbacks.H5A_iterate_cb;
+import hdf.hdf5lib.callbacks.H5A_iterate_t;
+import hdf.hdf5lib.callbacks.H5D_iterate_cb;
+import hdf.hdf5lib.callbacks.H5D_iterate_t;
+import hdf.hdf5lib.callbacks.H5E_walk_cb;
+import hdf.hdf5lib.callbacks.H5E_walk_t;
+import hdf.hdf5lib.callbacks.H5L_iterate_cb;
+import hdf.hdf5lib.callbacks.H5L_iterate_t;
+import hdf.hdf5lib.callbacks.H5O_iterate_cb;
+import hdf.hdf5lib.callbacks.H5O_iterate_t;
+import hdf.hdf5lib.callbacks.H5P_cls_close_func_cb;
+import hdf.hdf5lib.callbacks.H5P_cls_close_func_t;
+import hdf.hdf5lib.callbacks.H5P_cls_copy_func_cb;
+import hdf.hdf5lib.callbacks.H5P_cls_copy_func_t;
+import hdf.hdf5lib.callbacks.H5P_cls_create_func_cb;
+import hdf.hdf5lib.callbacks.H5P_cls_create_func_t;
+import hdf.hdf5lib.callbacks.H5P_prp_set_func_cb;
+import hdf.hdf5lib.callbacks.H5P_prp_get_func_cb;
+import hdf.hdf5lib.callbacks.H5P_prp_delete_func_cb;
+import hdf.hdf5lib.callbacks.H5P_prp_copy_func_cb;
+import hdf.hdf5lib.callbacks.H5P_prp_compare_func_cb;
+import hdf.hdf5lib.callbacks.H5P_prp_close_func_cb;
+import hdf.hdf5lib.callbacks.H5P_prp_create_func_cb;
+import hdf.hdf5lib.callbacks.H5P_iterate_cb;
+import hdf.hdf5lib.callbacks.H5P_iterate_t;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5JavaException;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.structs.H5AC_cache_config_t;
+import hdf.hdf5lib.structs.H5A_info_t;
+import hdf.hdf5lib.structs.H5E_error2_t;
+import hdf.hdf5lib.structs.H5F_info2_t;
+import hdf.hdf5lib.structs.H5G_info_t;
+import hdf.hdf5lib.structs.H5L_info_t;
+import hdf.hdf5lib.structs.H5O_info_t;
+
+/**
+ * This class is the Java interface for the HDF5 library.
+ * <p>
+ * This code is called by Java programs to access the entry points of the HDF5 library. Each routine wraps a single
+ * HDF5 entry point, generally with the arguments and return codes analogous to the C interface.
+ * <p>
+ * For details of the HDF5 library, see the HDF5 Documentation at: <a
+ * href="http://hdfgroup.org/HDF5/">http://hdfgroup.org/HDF5/</a>
+ * <hr>
+ * <p>
+ * <b>Mapping of arguments for Java</b>
+ *
+ * <p>
+ * In general, arguments to the HDF Java API are straightforward translations from the 'C' API described in the HDF
+ * Reference Manual.
+ *
+ * <center>
+ * <table border=2 cellpadding=2>
+ * <caption><b>HDF-5 C types to Java types</b> </caption>
+ * <tr>
+ * <td><b>HDF-5</b></td>
+ * <td><b>Java</b></td>
+ * </tr>
+ * <tr>
+ * <td>H5T_NATIVE_INT</td>
+ * <td>int, Integer</td>
+ * </tr>
+ * <tr>
+ * <td>H5T_NATIVE_SHORT</td>
+ * <td>short, Short</td>
+ * </tr>
+ * <tr>
+ * <td>H5T_NATIVE_FLOAT</td>
+ * <td>float, Float</td>
+ * </tr>
+ * <tr>
+ * <td>H5T_NATIVE_DOUBLE</td>
+ * <td>double, Double</td>
+ * </tr>
+ * <tr>
+ * <td>H5T_NATIVE_CHAR</td>
+ * <td>byte, Byte</td>
+ * </tr>
+ * <tr>
+ * <td>H5T_C_S1</td>
+ * <td>java.lang.String</td>
+ * </tr>
+ * <tr>
+ * <td>void * <BR>
+ * (i.e., pointer to `Any')</td>
+ * <td>Special -- see HDFArray</td>
+ * </tr>
+ * </table>
+ * </center>
+ * <center> <b>General Rules for Passing Arguments and Results</b> </center>
+ * <p>
+ * In general, arguments passed <b>IN</b> to Java are the analogous basic types, as above. The exception is for arrays,
+ * which are discussed below.
+ * <p>
+ * The <i>return value</i> of Java methods is also the analogous type, as above. A major exception to that rule is that
+ * all HDF functions that return SUCCEED/FAIL are declared <i>boolean</i> in the Java version, rather than <i>int</i> as
+ * in the C. Functions that return a value or else FAIL are declared with the Java equivalent of the C return type. However, in most
+ * cases the Java method will raise an exception instead of returning an error code. See <a href="#ERRORS">Errors and
+ * Exceptions</a> below.
+ * <p>
+ * Java does not support pass by reference of arguments, so arguments that are returned through <b>OUT</b> parameters
+ * must be wrapped in an object or array. The Java API for HDF consistently wraps arguments in arrays.
+ * <p>
+ * For instance, a function that returns two integers is declared:
+ *
+ * <pre>
+ * h_err_t HDF5dummy( int *a1, int *a2)
+ * </pre>
+ *
+ * For the Java interface, this would be declared:
+ *
+ * <pre>
+ * public synchronized static native int HDF5dummy(int args[]);
+ * </pre>
+ *
+ * where <i>a1</i> is <i>args[0]</i> and <i>a2</i> is <i>args[1]</i>, and would be invoked:
+ *
+ * <pre>
+ * H5.HDF5dummy(a);
+ * </pre>
+ *
+ * <p>
+ * All the routines where this convention is used will have specific documentation of the details, given below.
+ * <p>
+ * <a NAME="ARRAYS"> <b>Arrays</b> </a>
+ * <p>
+ * HDF5 needs to read and write multi-dimensional arrays of any number type (and records). The HDF5 API describes the
+ * layout of the source and destination, and the data for the array passed as a block of bytes, for instance,
+ *
+ * <pre>
+ * herr_t H5Dread(int fid, int filetype, int memtype, int memspace,
+ * void * data);
+ * </pre>
+ *
+ * <p>
+ * where ``void *'' means that the data may be any valid numeric type, and is a contiguous block of bytes that is the
+ * data for a multi-dimensional array. The other parameters describe the dimensions, rank, and datatype of the array on
+ * disk (source) and in memory (destination).
+ * <p>
+ * For Java, this ``ANY'' is a problem, as the type of data must always be declared. Furthermore, multidimensional
+ * arrays are definitely <i>not</i> laid out contiguously in memory. It would be infeasible to declare a separate
+ * routine for every combination of number type and dimensionality. For that reason, the <a
+ * href="./hdf.hdf5lib.HDFArray.html"><b>HDFArray</b></a> class is used to discover the type, shape, and size of the
+ * data array at run time, and to convert to and from a contiguous array of bytes in C order.
+ * <p>
+ * The upshot is that any Java array of numbers (either primitive or sub-classes of type <b>Number</b>) can be passed as
+ * an ``Object'', and the Java API will translate to and from the appropriate packed array of bytes needed by the C
+ * library. So the function above would be declared:
+ *
+ * <pre>
+ * public synchronized static native int H5Dread(int fid, int filetype, int memtype, int memspace, Object data);
+ * </pre>
+ *
+ * and the parameter <i>data</i> can be any multi-dimensional array of numbers, such as float[][], or int[][][], or
+ * Double[][].
+ * <p>
+ * <a NAME="CONSTANTS"> <b>HDF-5 Constants</b></a>
+ * <p>
+ * The HDF-5 API defines a set of constants and enumerated values. Most of these values are available to Java programs
+ * via the class <a href="./hdf.hdf5lib.HDF5Constants.html"> <b>HDF5Constants</b></a>. For example, the parameters for
+ * the H5Fopen() call include two numeric values, <b><i>HDF5Constants.H5F_ACC_RDWR</i></b> and
+ * <b><i>HDF5Constants.H5P_DEFAULT</i></b>. As would be expected, these numbers correspond to the C constants
+ * <b><i>H5F_ACC_RDWR</i></b> and <b><i>H5P_DEFAULT</i></b>.
+ * <p>
+ * The HDF-5 API defines a set of values that describe number types and sizes, such as "H5T_NATIVE_INT" and "hsize_t".
+ * These values are determined at run time by the HDF-5 C library. To support these parameters, the Java class <a
+ * href="./hdf.hdf5lib.HDF5CDataTypes.html"> <b>HDF5CDataTypes</b></a> looks up the values when it is initialized. The values
+ * can be accessed as public variables of the Java class, such as:
+ *
+ * <pre>
+ * int data_type = HDF5CDataTypes.JH5T_NATIVE_INT;
+ * </pre>
+ *
+ * The Java application uses both types of constants in the same way; the only difference is that the
+ * <b><i>HDF5CDataTypes</i></b> may have different values on different platforms.
+ * <p>
+ * <a NAME="ERRORS"> <b>Error handling and Exceptions</b></a>
+ * <p>
+ * The HDF5 error API (H5E) manages the behavior of the error stack in the HDF-5 library. This API is omitted from the
+ * JHI5. Errors are converted into Java exceptions. This is totally different from the C interface, but is very natural
+ * for Java programming.
+ * <p>
+ * The exceptions of the JHI5 are organized as sub-classes of the class <a
+ * href="./hdf.hdf5lib.exceptions.HDF5Exception.html"> <b>HDF5Exception</b></a>. There are two subclasses of
+ * <b>HDF5Exception</b>, <a href="./hdf.hdf5lib.exceptions.HDF5LibraryException.html"> <b>HDF5LibraryException</b></a>
+ * and <a href="./hdf.hdf5lib.exceptions.HDF5JavaException.html"> <b>HDF5JavaException</b></a>. The sub-classes of the
+ * former represent errors from the HDF-5 C library, while sub-classes of the latter represent errors in the JHI5
+ * wrapper and support code.
+ * <p>
+ * The super-class <b><i>HDF5LibraryException</i></b> implements the method '<b><i>printStackTrace()</i></b>', which
+ * prints out the HDF-5 error stack, as described in the HDF-5 C API <i><b>H5Eprint()</b>.</i> This may be used by Java
+ * exception handlers to print out the HDF-5 error stack.
+ * <hr>
+ *
+ * @version HDF5 1.9 <BR>
+ * <b>See also: <a href ="./hdf.hdf5lib.HDFArray.html"> hdf.hdf5lib.HDFArray</a> </b><BR>
+ * <a href ="./hdf.hdf5lib.HDF5Constants.html"> hdf.hdf5lib.HDF5Constants</a><BR>
+ * <a href ="./hdf.hdf5lib.HDF5CDataTypes.html"> hdf.hdf5lib.HDF5CDataTypes</a><BR>
+ * <a href ="./hdf.hdf5lib.HDF5Exception.html"> hdf.hdf5lib.HDF5Exception</a><BR>
+ *            <a href="http://hdfgroup.org/HDF5/"> http://hdfgroup.org/HDF5</a>
+ **/
+public class H5 implements java.io.Serializable {
+ /**
+ *
+ */
+ private static final long serialVersionUID = 6129888282117053288L;
+
+ private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5.class);
+
+ /**
+ * The version number of the HDF5 library:
+ * LIB_VERSION[0]: The major version of the library.
+ * LIB_VERSION[1]: The minor version of the library.
+ * LIB_VERSION[2]: The release number of the library.
+ *
+     * Make sure to update the version numbers when a different library is used.
+ */
+ public final static int LIB_VERSION[] = { 1, 9, 9999 };
+
+ public final static String H5PATH_PROPERTY_KEY = "hdf.hdf5lib.H5.hdf5lib";
+
+ // add system property to load library by name from library path, via
+ // System.loadLibrary()
+ public final static String H5_LIBRARY_NAME_PROPERTY_KEY = "hdf.hdf5lib.H5.loadLibraryName";
+ private static String s_libraryName;
+ private static boolean isLibraryLoaded = false;
+
+ private final static boolean IS_CRITICAL_PINNING = true;
+ // change from Vector to LinkedHashSet - jp 6-Oct-2014
+ private final static LinkedHashSet<Long> OPEN_IDS = new LinkedHashSet<Long>();
+
+ static {
+ loadH5Lib();
+ }
+
+ public static void loadH5Lib() {
+ // Make sure that the library is loaded only once
+ if (isLibraryLoaded)
+ return;
+
+ // first try loading library by name from user supplied library path
+ s_libraryName = System.getProperty(H5_LIBRARY_NAME_PROPERTY_KEY, null);
+ String mappedName = null;
+ if ((s_libraryName != null) && (s_libraryName.length() > 0)) {
+ try {
+ mappedName = System.mapLibraryName(s_libraryName);
+ System.loadLibrary(s_libraryName);
+ isLibraryLoaded = true;
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ isLibraryLoaded = false;
+ }
+ finally {
+ log.info("HDF5 library: " + s_libraryName);
+ log.debug(" resolved to: " + mappedName + "; ");
+ log.info((isLibraryLoaded ? "" : " NOT") + " successfully loaded from system property");
+ }
+ }
+
+ if (!isLibraryLoaded) {
+ // else try loading library via full path
+ String filename = System.getProperty(H5PATH_PROPERTY_KEY, null);
+ if ((filename != null) && (filename.length() > 0)) {
+ File h5dll = new File(filename);
+ if (h5dll.exists() && h5dll.canRead() && h5dll.isFile()) {
+ try {
+ System.load(filename);
+ isLibraryLoaded = true;
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ isLibraryLoaded = false;
+ }
+ finally {
+ log.info("HDF5 library: ");
+ log.debug(filename);
+ log.info((isLibraryLoaded ? "" : " NOT") + " successfully loaded.");
+ }
+ }
+ else {
+ isLibraryLoaded = false;
+ throw (new UnsatisfiedLinkError("Invalid HDF5 library, " + filename));
+ }
+ }
+ }
+
+ // else load standard library
+ if (!isLibraryLoaded) {
+ try {
+ s_libraryName = "hdf5_java";
+ mappedName = System.mapLibraryName(s_libraryName);
+ System.loadLibrary("hdf5_java");
+ isLibraryLoaded = true;
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ isLibraryLoaded = false;
+ }
+ finally {
+ log.info("HDF5 library: " + s_libraryName);
+ log.debug(" resolved to: " + mappedName + "; ");
+ log.info((isLibraryLoaded ? "" : " NOT") + " successfully loaded from java.library.path");
+ }
+ }
+
+ /* Important! Exit quietly */
+ try {
+ H5.H5dont_atexit();
+ }
+ catch (HDF5LibraryException e) {
+ System.exit(1);
+ }
+
+ /* Important! Disable error output to C stdout */
+ if (!log.isDebugEnabled())
+ H5.H5error_off();
+
+ /*
+         * Optional: confirm the version. This will crash immediately if it is not the specified version.
+ */
+ Integer majnum = Integer.getInteger("hdf.hdf5lib.H5.hdf5maj", null);
+ Integer minnum = Integer.getInteger("hdf.hdf5lib.H5.hdf5min", null);
+ Integer relnum = Integer.getInteger("hdf.hdf5lib.H5.hdf5rel", null);
+ if ((majnum != null) && (minnum != null) && (relnum != null)) {
+ H5.H5check_version(majnum.intValue(), minnum.intValue(), relnum.intValue());
+ }
+ }
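A hedged sketch of how an application can steer loadH5Lib() through the two system properties read above; the class name and library path below are placeholders, not part of this patch:

    public class LoadSketch {
        public static void main(String[] args) {
            // Either point at the JNI library by absolute path ...
            // System.setProperty("hdf.hdf5lib.H5.hdf5lib", "/opt/hdf5/lib/libhdf5_java.so");
            // ... or load it by name, resolved against java.library.path:
            System.setProperty("hdf.hdf5lib.H5.loadLibraryName", "hdf5_java");

            // Explicit call; otherwise the static initializer runs loadH5Lib() on
            // first use of H5, falling back to System.loadLibrary("hdf5_java").
            hdf.hdf5lib.H5.loadH5Lib();
        }
    }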
+
+ // ////////////////////////////////////////////////////////////
+ // //
+ // H5: General Library Functions //
+ // //
+ // ////////////////////////////////////////////////////////////
+
+ /**
+ * Get number of open IDs.
+ *
+ * @return Returns a count of open IDs
+ */
+ public final static int getOpenIDCount() {
+ return OPEN_IDS.size();
+ }
+
+ /**
+ * Get the open IDs
+ *
+ * @return Returns a collection of open IDs
+ */
+ public final static Collection<Long> getOpenIDs() {
+ return OPEN_IDS;
+ }
+
+ /**
+ * H5check_version verifies that the arguments match the version numbers compiled into the library.
+ *
+ * @param majnum
+ * The major version of the library.
+ * @param minnum
+ * The minor version of the library.
+ * @param relnum
+ * The release number of the library.
+ * @return a non-negative value if successful. Upon failure (when the versions do not match), this function causes
+ * the application to abort (i.e., crash)
+ *
+ * See C API function: herr_t H5check_version()
+ **/
+ public synchronized static native int H5check_version(int majnum, int minnum, int relnum);
+
+ /**
+ * H5close flushes all data to disk, closes all file identifiers, and cleans up all memory used by the library.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5close() throws HDF5LibraryException;
+
+ /**
+     * H5open initializes the library.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5open() throws HDF5LibraryException;
+
+ /**
+ * H5dont_atexit indicates to the library that an atexit() cleanup routine should not be installed. In order to be
+ * effective, this routine must be called before any other HDF function calls, and must be called each time the
+ * library is loaded/linked into the application (the first time and after it's been unloaded).
+ * <P>
+ * This is called by the static initializer, so this should never need to be explicitly called by a Java program.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ private synchronized static native int H5dont_atexit() throws HDF5LibraryException;
+
+ /**
+     * Turn off error handling. By default, the HDF-5 C library prints its error stack on stdout. This
+     * behavior may be disabled by calling H5error_off().
+ *
+ * @return a non-negative value if successful
+ */
+ public synchronized static native int H5error_off();
+
+ /**
+ * H5garbage_collect collects on all free-lists of all types.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5garbage_collect() throws HDF5LibraryException;
+
+ /**
+ * H5get_libversion retrieves the major, minor, and release numbers of the version of the HDF library which is
+ * linked to the application.
+ *
+ * @param libversion
+ * The version information of the HDF library.
+ *
+ * <pre>
+ * libversion[0] = The major version of the library.
+ * libversion[1] = The minor version of the library.
+ * libversion[2] = The release number of the library.
+ * </pre>
+ * @return a non-negative value if successful, along with the version information.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5get_libversion(int[] libversion) throws HDF5LibraryException;
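A small sketch of the int[] OUT-parameter convention described in the class comment, applied to H5get_libversion; the demo class is hypothetical and assumes the native library can be loaded:

    import hdf.hdf5lib.H5;

    public class VersionSketch {
        public static void main(String[] args) throws Exception {
            int[] libversion = new int[3];   // major, minor, release
            H5.H5get_libversion(libversion);
            System.out.println("Linked against HDF5 "
                    + libversion[0] + "." + libversion[1] + "." + libversion[2]);
        }
    }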
+
+ public synchronized static native int H5set_free_list_limits(int reg_global_lim, int reg_list_lim,
+ int arr_global_lim, int arr_list_lim, int blk_global_lim, int blk_list_lim) throws HDF5LibraryException;
+
+ /**
+ * H5export_dataset is a utility function to save data in a file.
+ *
+ * @param file_export_name
+ * The file name to export data into.
+ * @param file_name
+ * The name of the HDF5 file containing the dataset.
+ * @param object_path
+ * The full path of the dataset to be exported.
+ * @param binary_order
+ * 99 - export data as text.
+ * 1 - export data as binary Native Order.
+ * 2 - export data as binary Little Endian.
+ * 3 - export data as binary Big Endian.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5export_dataset(String file_export_name, String file_name,
+ String object_path, int binary_order) throws HDF5LibraryException;
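A hedged one-call example of H5export_dataset using the binary_order codes documented above; the file and dataset names are placeholders:

    import hdf.hdf5lib.H5;

    public class ExportSketch {
        public static void main(String[] args) throws Exception {
            // 99 = text; 1 = native-order binary; 2 = little endian; 3 = big endian.
            H5.H5export_dataset("DS1.txt", "example.h5", "/G1/DS1", 99);
        }
    }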
+
+ /**
+     * H5is_library_threadsafe checks whether the library was built with thread-safety enabled.
+     *
+     * @return true if the HDF5 library was built with thread safety enabled
+ *
+ **/
+ private synchronized static native boolean H5is_library_threadsafe();
+
+ // /////// unimplemented ////////
+ // H5_DLL herr_t H5free_memory(void *mem);
+ // H5_DLL void *H5allocate_memory(size_t size, hbool_t clear);
+ // H5_DLL void *H5resize_memory(void *mem, size_t size);
+
+ // ////////////////////////////////////////////////////////////
+ // //
+ // H5A: HDF5 1.8 Attribute Interface API Functions //
+ // //
+ // ////////////////////////////////////////////////////////////
+
+ /**
+ * H5Aclose terminates access to the attribute specified by its identifier, attr_id.
+ *
+ * @param attr_id
+ * IN: Attribute to release access to.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static int H5Aclose(long attr_id) throws HDF5LibraryException {
+ if (attr_id < 0)
+ return 0; // throw new HDF5LibraryException("Negative ID");;
+
+ log.trace("OPEN_IDS: H5Aclose remove {}", attr_id);
+ OPEN_IDS.remove(attr_id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ return _H5Aclose(attr_id);
+ }
+
+ private synchronized static native int _H5Aclose(long attr_id) throws HDF5LibraryException;
+
+ /**
+ * H5Acopy copies the content of one attribute to another.
+ *
+ * @param src_aid
+ * the identifier of the source attribute
+ * @param dst_aid
+ * the identifier of the destination attribute
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native int H5Acopy(long src_aid, long dst_aid) throws HDF5LibraryException;
+
+ /**
+ * H5Acreate creates an attribute, attr_name, which is attached to the object specified by the identifier loc_id.
+ *
+ * @param loc_id
+ * IN: Location or object identifier; may be dataset or group
+ * @param attr_name
+ * IN: Attribute name
+ * @param type_id
+ * IN: Attribute datatype identifier
+ * @param space_id
+ * IN: Attribute dataspace identifier
+ * @param acpl_id
+ * IN: Attribute creation property list identifier
+ * @param aapl_id
+ * IN: Attribute access property list identifier
+ *
+ * @return An attribute identifier if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - Name is null.
+ **/
+ public static long H5Acreate(long loc_id, String attr_name, long type_id, long space_id, long acpl_id, long aapl_id)
+ throws HDF5LibraryException, NullPointerException {
+ long id = _H5Acreate2(loc_id, attr_name, type_id, space_id, acpl_id, aapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5A create add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ /**
+     * H5Acreate2 creates an attribute, attr_name, which is attached to the object specified by the identifier loc_id.
+ *
+ * @see public static long H5Acreate( long loc_id, String attr_name, long type_id, long space_id, long acpl_id, long
+ * aapl_id )
+ **/
+ private synchronized static native long _H5Acreate2(long loc_id, String attr_name, long type_id, long space_id,
+ long acpl_id, long aapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Acreate_by_name creates an attribute, attr_name, which is attached to the object specified by loc_id and
+ * obj_name.
+ *
+ * @param loc_id
+ * IN: Location or object identifier; may be dataset or group
+ * @param obj_name
+ * IN: Name, relative to loc_id, of object that attribute is to be attached to
+ * @param attr_name
+ * IN: Attribute name
+ * @param type_id
+ * IN: Attribute datatype identifier
+ * @param space_id
+ * IN: Attribute dataspace identifier
+ * @param acpl_id
+ * IN: Attribute creation property list identifier (currently not used).
+ * @param aapl_id
+ * IN: Attribute access property list identifier (currently not used).
+ * @param lapl_id
+ * IN: Link access property list
+ *
+ * @return An attribute identifier if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public static long H5Acreate_by_name(long loc_id, String obj_name, String attr_name, long type_id, long space_id,
+ long acpl_id, long aapl_id, long lapl_id) throws HDF5LibraryException, NullPointerException {
+ long id = _H5Acreate_by_name(loc_id, obj_name, attr_name, type_id, space_id, acpl_id, aapl_id, lapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Acreate_by_name add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Acreate_by_name(long loc_id, String obj_name, String attr_name,
+ long type_id, long space_id, long acpl_id, long aapl_id, long lapl_id) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+ * H5Adelete removes the attribute specified by its name, name, from a dataset, group, or named datatype.
+ *
+ * @param loc_id
+ * IN: Identifier of the dataset, group, or named datatype.
+ * @param name
+ * IN: Name of the attribute to delete.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native int H5Adelete(long loc_id, String name) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+ * H5Adelete_by_idx removes an attribute, specified by its location in an index, from an object.
+ *
+ * @param loc_id
+ * IN: Location or object identifier; may be dataset or group
+ * @param obj_name
+ * IN: Name of object, relative to location, from which attribute is to be removed
+ * @param idx_type
+ * IN: Type of index
+ * @param order
+ * IN: Order in which to iterate over index
+ * @param n
+ * IN: Offset within index
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - obj_name is null.
+ **/
+ public synchronized static native void H5Adelete_by_idx(long loc_id, String obj_name, int idx_type, int order,
+ long n, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Adelete_by_name removes the attribute attr_name from an object specified by location and name, loc_id and
+ * obj_name, respectively.
+ *
+ * @param loc_id
+ * IN: Location or object identifier; may be dataset or group
+ * @param obj_name
+ * IN: Name of object, relative to location, from which attribute is to be removed
+ * @param attr_name
+ * IN: Name of attribute to delete
+ * @param lapl_id
+ * IN: Link access property list identifier.
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native int H5Adelete_by_name(long loc_id, String obj_name, String attr_name, long lapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Aexists determines whether the attribute attr_name exists on the object specified by obj_id.
+ *
+ * @param obj_id
+ * IN: Object identifier.
+ * @param attr_name
+ * IN: Name of the attribute.
+ *
+ * @return boolean true if an attribute with a given name exists.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - attr_name is null.
+ **/
+ public synchronized static native boolean H5Aexists(long obj_id, String attr_name) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+ * H5Aexists_by_name determines whether the attribute attr_name exists on an object. That object is specified by its
+ * location and name, loc_id and obj_name, respectively.
+ *
+ * @param loc_id
+ * IN: Location of object to which attribute is attached .
+ * @param obj_name
+ * IN: Name, relative to loc_id, of object that attribute is attached to.
+ * @param attr_name
+ * IN: Name of attribute.
+ * @param lapl_id
+ * IN: Link access property list identifier.
+ *
+ * @return boolean true if an attribute with a given name exists, otherwise returns false.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native boolean H5Aexists_by_name(long loc_id, String obj_name, String attr_name,
+ long lapl_id) throws HDF5LibraryException, NullPointerException;
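A short sketch combining the two existence tests above. The H5Fopen/H5Fclose calls and HDF5Constants values are assumed from elsewhere in this patch, and the file and attribute names are placeholders:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class ExistsSketch {
        public static void main(String[] args) throws Exception {
            long file_id = H5.H5Fopen("example.h5",
                    HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
            try {
                if (H5.H5Aexists(file_id, "Units"))
                    System.out.println("root group has attribute Units");
                if (H5.H5Aexists_by_name(file_id, "/G1/DS1", "Units",
                        HDF5Constants.H5P_DEFAULT))
                    System.out.println("/G1/DS1 has attribute Units");
            }
            finally {
                H5.H5Fclose(file_id);
            }
        }
    }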
+
+ /**
+ * H5Aget_info retrieves attribute information, by attribute identifier.
+ *
+ * @param attr_id
+ * IN: Attribute identifier
+ *
+ * @return A buffer(H5A_info_t) for Attribute information
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native H5A_info_t H5Aget_info(long attr_id) throws HDF5LibraryException;
+
+ /**
+     * H5Aget_info_by_idx retrieves attribute information, by attribute index position.
+ *
+ * @param loc_id
+ * IN: Location of object to which attribute is attached
+ * @param obj_name
+ * IN: Name of object to which attribute is attached, relative to location
+ * @param idx_type
+ * IN: Type of index
+ * @param order
+ * IN: Index traversal order
+ * @param n
+ * IN: Attribute's position in index
+ * @param lapl_id
+ * IN: Link access property list
+ *
+ * @return A buffer(H5A_info_t) for Attribute information
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - obj_name is null.
+ **/
+ public synchronized static native H5A_info_t H5Aget_info_by_idx(long loc_id, String obj_name, int idx_type,
+ int order, long n, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+     * H5Aget_info_by_name retrieves attribute information, by attribute name.
+ *
+ * @param loc_id
+ * IN: Location of object to which attribute is attached
+ * @param obj_name
+ * IN: Name of object to which attribute is attached, relative to location
+ * @param attr_name
+ * IN: Attribute name
+ * @param lapl_id
+ * IN: Link access property list
+ *
+ * @return A buffer(H5A_info_t) for Attribute information
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - obj_name is null.
+ **/
+ public synchronized static native H5A_info_t H5Aget_info_by_name(long loc_id, String obj_name, String attr_name,
+ long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Aget_name retrieves the name of an attribute specified by the identifier, attr_id.
+ *
+ * @param attr_id
+ * IN: Identifier of the attribute.
+ *
+ * @return String for Attribute name.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native String H5Aget_name(long attr_id)
+ throws HDF5LibraryException;
+
+ /**
+ * H5Aget_name_by_idx retrieves the name of an attribute that is attached to an object, which is specified by its
+ * location and name, loc_id and obj_name, respectively.
+ *
+ * @param attr_id
+ * IN: Attribute identifier
+ * @param obj_name
+ * IN: Name of object to which attribute is attached, relative to location
+ * @param idx_type
+ * IN: Type of index
+ * @param order
+ * IN: Index traversal order
+ * @param n
+ * IN: Attribute's position in index
+ * @param lapl_id
+ * IN: Link access property list
+ *
+ * @return String for Attribute name.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF5 Library.
+ * @exception NullPointerException
+ * - obj_name is null.
+ **/
+ public synchronized static native String H5Aget_name_by_idx(long attr_id, String obj_name, int idx_type, int order,
+ long n, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Aget_space retrieves a copy of the dataspace for an attribute.
+ *
+ * @param attr_id
+ * IN: Identifier of an attribute.
+ *
+ * @return attribute dataspace identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Aget_space(long attr_id) throws HDF5LibraryException {
+ long id = _H5Aget_space(attr_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Aget_space add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Aget_space(long attr_id) throws HDF5LibraryException;
+
+ /**
+ * H5Aget_storage_size returns the amount of storage that is required for the specified attribute, attr_id.
+ *
+ * @param attr_id
+ * IN: Identifier of the attribute to query.
+ *
+ * @return the amount of storage size allocated for the attribute; otherwise returns 0 (zero)
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Aget_storage_size(long attr_id) throws HDF5LibraryException;
+
+ /**
+ * H5Aget_type retrieves a copy of the datatype for an attribute.
+ *
+ * @param attr_id
+ * IN: Identifier of an attribute.
+ *
+ * @return a datatype identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Aget_type(long attr_id) throws HDF5LibraryException {
+ long id = _H5Aget_type(attr_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Aget_type add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Aget_type(long attr_id) throws HDF5LibraryException;
+
+ /**
+     * H5Aopen opens an existing attribute, attr_name, that is attached to an object specified by an object identifier,
+     * obj_id.
+ *
+ * @param obj_id
+ * IN: Identifier for object to which attribute is attached
+ * @param attr_name
+ * IN: Name of attribute to open
+ * @param aapl_id
+ * IN: Attribute access property list identifier
+ *
+ * @return An attribute identifier if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - Name is null.
+ **/
+ public static long H5Aopen(long obj_id, String attr_name, long aapl_id) throws HDF5LibraryException,
+ NullPointerException {
+ long id = _H5Aopen(obj_id, attr_name, aapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Aopen add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Aopen(long obj_id, String attr_name, long aapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Aopen_by_idx opens an existing attribute that is attached to an object specified by location and name, loc_id
+     * and obj_name, respectively.
+ *
+ * @param loc_id
+ * IN: Location of object to which attribute is attached
+ * @param obj_name
+ * IN: Name of object to which attribute is attached, relative to location
+ * @param idx_type
+ * IN: Type of index
+ * @param order
+ * IN: Index traversal order
+ * @param n
+ * IN: Attribute's position in index
+ * @param aapl_id
+ * IN: Attribute access property list
+ * @param lapl_id
+ * IN: Link access property list
+ *
+ * @return An attribute identifier if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - Name is null.
+ **/
+ public static long H5Aopen_by_idx(long loc_id, String obj_name, int idx_type, int order, long n, long aapl_id,
+ long lapl_id) throws HDF5LibraryException, NullPointerException {
+ long id = _H5Aopen_by_idx(loc_id, obj_name, idx_type, order, n, aapl_id, lapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Aopen_by_idx add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Aopen_by_idx(long loc_id, String obj_name, int idx_type, int order,
+ long n, long aapl_id, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+     * H5Aopen_by_name opens an attribute for an object by object name and attribute name.
+ *
+ * @param loc_id
+ * IN: Location from which to find object to which attribute is attached
+ * @param obj_name
+ * IN: Name of object to which attribute is attached, relative to loc_id
+ * @param attr_name
+ * IN: Name of attribute to open
+ * @param aapl_id
+ * IN: Attribute access property list
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @return Returns an attribute identifier if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - obj_name is null.
+ **/
+ public static long H5Aopen_by_name(long loc_id, String obj_name, String attr_name, long aapl_id, long lapl_id)
+ throws HDF5LibraryException, NullPointerException {
+ long id = _H5Aopen_by_name(loc_id, obj_name, attr_name, aapl_id, lapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Aopen_by_name add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Aopen_by_name(long loc_id, String obj_name, String attr_name,
+ long aapl_id, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
+ * mem_type_id. The entire attribute is read into buf from the file.
+ *
+ * @param attr_id
+ * IN: Identifier of an attribute to read.
+ * @param mem_type_id
+ * IN: Identifier of the attribute datatype (in memory).
+ * @param buf
+ * IN: Buffer for data to be read.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - data buffer is null.
+ **/
+ public synchronized static native int H5Aread(long attr_id, long mem_type_id, byte[] buf)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
+ * mem_type_id. The entire attribute is read into data object from the file.
+ *
+ * @param attr_id
+ * IN: Identifier of an attribute to read.
+ * @param mem_type_id
+ * IN: Identifier of the attribute datatype (in memory).
+ * @param obj
+ * IN: Object for data to be read.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+     *                - data buffer is null. See also H5Aread(long attr_id, long mem_type_id, byte[] buf).
+ **/
+ public synchronized static int H5Aread(long attr_id, long mem_type_id, Object obj) throws HDF5Exception,
+ NullPointerException {
+ HDFArray theArray = new HDFArray(obj);
+ byte[] buf = theArray.emptyBytes();
+
+ // This will raise an exception if there is an error
+ int status = H5Aread(attr_id, mem_type_id, buf);
+
+ // No exception: status really ought to be OK
+ if (status >= 0) {
+ obj = theArray.arrayify(buf);
+ }
+
+ return status;
+ }
+
+ public synchronized static native int H5AreadVL(long attr_id, long mem_type_id, String[] buf)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+     * H5Arename changes the name of an attribute that is attached to the object specified by loc_id. The attribute named
+ * old_attr_name is renamed new_attr_name.
+ *
+ * @param loc_id
+ * IN: Location or object identifier; may be dataset or group
+ * @param old_attr_name
+ * IN: Prior attribute name
+ * @param new_attr_name
+ * IN: New attribute name
+ *
+ * @return A non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - Name is null.
+ **/
+ public synchronized static native int H5Arename(long loc_id, String old_attr_name, String new_attr_name)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+     * H5Arename_by_name changes the name of an attribute that is attached to the object specified by loc_id and obj_name.
+ * The attribute named old_attr_name is renamed new_attr_name.
+ *
+ * @param loc_id
+ * IN: Location or object identifier; may be dataset or group
+ * @param obj_name
+ * IN: Name of object, relative to location, whose attribute is to be renamed
+ * @param old_attr_name
+ * IN: Prior attribute name
+ * @param new_attr_name
+ * IN: New attribute name
+ * @param lapl_id
+ * IN: Link access property list
+ *
+ * @return A non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - Name is null.
+ **/
+ public synchronized static native int H5Arename_by_name(long loc_id, String obj_name, String old_attr_name,
+ String new_attr_name, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
+ * mem_type_id. The entire attribute is written from buf to the file.
+ *
+ * @param attr_id
+ * IN: Identifier of an attribute to write.
+ * @param mem_type_id
+ * IN: Identifier of the attribute datatype (in memory).
+ * @param buf
+ * IN: Data to be written.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - data is null.
+ **/
+ public synchronized static native int H5Awrite(long attr_id, long mem_type_id, byte[] buf)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
+ * mem_type_id. The entire attribute is written from data object to the file.
+ *
+ * @param attr_id
+ * IN: Identifier of an attribute to write.
+ * @param mem_type_id
+ * IN: Identifier of the attribute datatype (in memory).
+ * @param obj
+ * IN: Data object to be written.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - data object is null. See public synchronized static native int H5Awrite(long attr_id, long
+ * mem_type_id, byte[] buf);
+ **/
+ public synchronized static int H5Awrite(long attr_id, long mem_type_id, Object obj)
+ throws HDF5Exception, NullPointerException
+ {
+ HDFArray theArray = new HDFArray(obj);
+ byte[] buf = theArray.byteify();
+
+ int retVal = H5Awrite(attr_id, mem_type_id, buf);
+ buf = null;
+ theArray = null;
+ return retVal;
+ }
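+
+ // Illustrative sketch: writing a double attribute through the Object overload above. Assumes
+ // attr_id was created with a three-element dataspace and a native double datatype; error
+ // handling omitted.
+ //
+ //     double[] coefficients = { 0.5, 1.25, 2.0 };
+ //     H5.H5Awrite(attr_id, HDF5Constants.H5T_NATIVE_DOUBLE, coefficients);
+ //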
+
+ public synchronized static native int H5AwriteVL(long attr_id, long mem_type_id, String[] buf)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Aget_create_plist retrieves a copy of the attribute creation property list identifier.
+ *
+ * @param attr_id
+ * IN: Identifier of an attribute.
+ *
+ * @return identifier for the attribute's creation property list if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Aget_create_plist(long attr_id)
+ throws HDF5LibraryException
+ {
+ long id = _H5Aget_create_plist(attr_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Aget_create_plist add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Aget_create_plist(long attr_id) throws HDF5LibraryException;
+
+ /**
+ * H5Aiterate2 iterates over the attributes attached to a dataset, named datatype, or group, as
+ * specified by loc_id. For each attribute, user-provided data, op_data, with additional information
+ * as defined below, is passed to a user-defined function, op, which operates on that attribute.
+ *
+ * @param loc_id
+ * IN: Identifier for object to which attributes are attached; may be group, dataset, or named datatype.
+ * @param idx_type
+ * IN: The type of index specified by idx_type can be one of the following:
+ * H5_INDEX_NAME An alpha-numeric index by attribute name.
+ * H5_INDEX_CRT_ORDER An index by creation order.
+ * @param order
+ * IN: The order in which the index is to be traversed, as specified by order, can be one of the following:
+ * H5_ITER_INC Iteration is from beginning to end, i.e., a top-down iteration
+ * incrementing the index position at each step.
+ * H5_ITER_DEC Iteration starts at the end of the index, i.e., a bottom-up iteration
+ * decrementing the index position at each step.
+ * H5_ITER_NATIVE HDF5 iterates in the fastest-available order. No information is provided
+ * as to the order, but HDF5 ensures that each element in the index will be
+ * visited if the iteration completes successfully.
+ * @param idx
+ * IN/OUT: Initial and returned offset within index.
+ * @param op
+ * IN: Callback function to operate on each value.
+ * @param op_data
+ * IN/OUT: Pointer to any user-defined data for use by the operator function.
+ *
+ * @return returns the return value of the first operator that returns a positive value, or zero if all members were
+ * processed with no operator returning non-zero.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - buf is null.
+ **/
+ public synchronized static native int H5Aiterate(long loc_id, int idx_type, int order, long idx,
+ H5A_iterate_cb op, H5A_iterate_t op_data) throws HDF5LibraryException, NullPointerException;
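+
+ // Illustrative sketch: visiting every attribute of an object in name order. The exact callback
+ // parameter list is defined by the H5A_iterate_cb interface; the lambda shape shown here is an
+ // assumption and should be checked against that interface before use.
+ //
+ //     H5A_iterate_cb printName = (long obj, String name, H5A_info_t info, H5A_iterate_t data) -> {
+ //         System.out.println("attribute: " + name);
+ //         return 0; // zero continues the iteration
+ //     };
+ //     H5.H5Aiterate(loc_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0L, printName, null);
+ //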
+
+ /**
+ * H5Aiterate_by_name iterates over the attributes attached to the dataset or group specified with loc_id
+ * and obj_name. For each attribute, user-provided data, op_data, with additional information as defined
+ * below, is passed to a user-defined function, op, which operates on that attribute.
+ *
+ * @param loc_id
+ * IN: Identifier for object to which attributes are attached; may be group, dataset, or named datatype.
+ * @param obj_name
+ * IN: Name of object, relative to location.
+ * @param idx_type
+ * IN: The type of index specified by idx_type can be one of the following:
+ * H5_INDEX_NAME An alpha-numeric index by attribute name.
+ * H5_INDEX_CRT_ORDER An index by creation order.
+ * @param order
+ * IN: The order in which the index is to be traversed, as specified by order, can be one of the following:
+ * H5_ITER_INC Iteration is from beginning to end, i.e., a top-down iteration
+ * incrementing the index position at each step.
+ * H5_ITER_DEC Iteration starts at the end of the index, i.e., a bottom-up iteration
+ * decrementing the index position at each step.
+ * H5_ITER_NATIVE HDF5 iterates in the fastest-available order. No information is provided
+ * as to the order, but HDF5 ensures that each element in the index will be
+ * visited if the iteration completes successfully.
+ * @param idx
+ * IN/OUT: Initial and returned offset within index.
+ * @param op
+ * IN: Callback function to operate on each value.
+ * @param op_data
+ * IN/OUT: Pointer to any user-defined data for use by the operator function.
+ * @param lapl_id
+ * IN: Link access property list
+ *
+ * @return returns the return value of the first operator that returns a positive value, or zero if all members were
+ * processed with no operator returning non-zero.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - buf is null.
+ **/
+ public synchronized static native int H5Aiterate_by_name(long loc_id, String obj_name, int idx_type,
+ int order, long idx, H5A_iterate_cb op, H5A_iterate_t op_data, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ // ////////////////////////////////////////////////////////////
+ // //
+ // H5D: Datasets Interface Functions //
+ // //
+ // ////////////////////////////////////////////////////////////
+
+ /**
+ * H5Dcopy copies the content of one dataset to another dataset.
+ *
+ * @param src_did
+ * the identifier of the source dataset
+ * @param dst_did
+ * the identifier of the destination dataset
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native int H5Dcopy(long src_did, long dst_did) throws HDF5LibraryException;
+
+ /**
+ * H5Dclose ends access to a dataset specified by dataset_id and releases resources used by it.
+ *
+ * @param dataset_id
+ * Identifier of the dataset to finish access to.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static int H5Dclose(long dataset_id) throws HDF5LibraryException {
+ if (dataset_id < 0)
+ return 0; // throw new HDF5LibraryException("Negative ID");
+
+ log.trace("OPEN_IDS: H5Dclose remove {}", dataset_id);
+ OPEN_IDS.remove(dataset_id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ return _H5Dclose(dataset_id);
+ }
+
+ private synchronized static native int _H5Dclose(long dataset_id) throws HDF5LibraryException;
+
+ /**
+ * H5Dcreate creates a new dataset named name at the location specified by loc_id.
+ *
+ * @param loc_id
+ * IN: Location identifier
+ * @param name
+ * IN: Dataset name
+ * @param type_id
+ * IN: Datatype identifier
+ * @param space_id
+ * IN: Dataspace identifier
+ * @param lcpl_id
+ * IN: Identifier of link creation property list.
+ * @param dcpl_id
+ * IN: Identifier of dataset creation property list.
+ * @param dapl_id
+ * IN: Identifier of dataset access property list.
+ *
+ * @return a dataset identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public static long H5Dcreate(long loc_id, String name, long type_id, long space_id, long lcpl_id, long dcpl_id,
+ long dapl_id) throws HDF5LibraryException, NullPointerException {
+ long id = _H5Dcreate2(loc_id, name, type_id, space_id, lcpl_id, dcpl_id, dapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Dcreate add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
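+
+ // Illustrative sketch: creating a 2-D 32-bit integer dataset with default property lists.
+ // Assumes file_id comes from H5Fcreate or H5Fopen; the names and sizes are made up for the
+ // example, and closing of the returned identifiers is omitted.
+ //
+ //     long[] dims = { 4, 6 };
+ //     long space_id = H5.H5Screate_simple(2, dims, null);
+ //     long dset_id = H5.H5Dcreate(file_id, "/data", HDF5Constants.H5T_STD_I32LE, space_id,
+ //             HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ //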
+
+ /**
+ * H5Dcreate2 creates a new dataset named name at the location specified by loc_id.
+ *
+ * @see public static long H5Dcreate(long loc_id, String name, long type_id, long space_id, long lcpl_id, long dcpl_id,
+ * long dapl_id)
+ **/
+ private synchronized static native long _H5Dcreate2(long loc_id, String name, long type_id, long space_id,
+ long lcpl_id, long dcpl_id, long dapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Dcreate_anon creates a dataset in the file specified by loc_id.
+ *
+ * @param loc_id
+ * IN: Location identifier
+ * @param type_id
+ * IN: Datatype identifier
+ * @param space_id
+ * IN: Dataspace identifier
+ * @param dcpl_id
+ * IN: Identifier of dataset creation property list.
+ * @param dapl_id
+ * IN: Identifier of dataset access property list.
+ *
+ * @return a dataset identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Dcreate_anon(long loc_id, long type_id, long space_id, long dcpl_id, long dapl_id)
+ throws HDF5LibraryException {
+ long id = _H5Dcreate_anon(loc_id, type_id, space_id, dcpl_id, dapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Dcreate_anon add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Dcreate_anon(long loc_id, long type_id, long space_id, long dcpl_id,
+ long dapl_id) throws HDF5LibraryException;
+
+ /**
+ * H5Dfill explicitly fills the dataspace selection in memory, space_id, with the fill value specified in fill.
+ *
+ * @param fill
+ * IN: Pointer to the fill value to be used.
+ * @param fill_type
+ * IN: Fill value datatype identifier.
+ * @param buf
+ * IN/OUT: Pointer to the memory buffer containing the selection to be filled.
+ * @param buf_type
+ * IN: Datatype of dataspace elements to be filled.
+ * @param space_id
+ * IN: Dataspace describing memory buffer and containing the selection to be filled.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - buf is null.
+ **/
+ public synchronized static native void H5Dfill(byte[] fill, long fill_type, byte[] buf, long buf_type, long space_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Dget_access_plist returns an identifier for a copy of the dataset access property list for a dataset.
+ *
+ * @param dset_id
+ * IN: Identifier of the dataset to query.
+ *
+ * @return a dataset access property list identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Dget_access_plist(long dset_id) throws HDF5LibraryException;
+
+ /**
+ * H5Dget_create_plist returns an identifier for a copy of the dataset creation property list for a dataset.
+ *
+ * @param dataset_id
+ * Identifier of the dataset to query.
+ * @return a dataset creation property list identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Dget_create_plist(long dataset_id) throws HDF5LibraryException {
+ long id = _H5Dget_create_plist(dataset_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Dget_create_plist add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Dget_create_plist(long dataset_id) throws HDF5LibraryException;
+
+ /**
+ * H5Dget_offset returns the address in the file of the dataset dset_id.
+ *
+ * @param dset_id
+ * IN: Identifier of the dataset in question
+ *
+ * @return the offset in bytes.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Dget_offset(long dset_id) throws HDF5LibraryException;
+
+ /**
+ * H5Dget_space returns an identifier for a copy of the dataspace for a dataset.
+ *
+ * @param dataset_id
+ * Identifier of the dataset to query.
+ *
+ * @return a dataspace identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Dget_space(long dataset_id) throws HDF5LibraryException {
+ long id = _H5Dget_space(dataset_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Dget_space add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Dget_space(long dataset_id) throws HDF5LibraryException;
+
+ /**
+ * H5Dget_space_status determines whether space has been allocated for the dataset dset_id.
+ *
+ * @param dset_id
+ * IN: Identifier of the dataset to query.
+ *
+ * @return the space allocation status
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Dget_space_status(long dset_id) throws HDF5LibraryException;
+
+ /**
+ * H5Dget_storage_size returns the amount of storage that is required for the dataset.
+ *
+ * @param dataset_id
+ * Identifier of the dataset in question
+ *
+ * @return the amount of storage space allocated for the dataset.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Dget_storage_size(long dataset_id) throws HDF5LibraryException,
+ IllegalArgumentException;
+
+ /**
+ * H5Dget_type returns an identifier for a copy of the datatype for a dataset.
+ *
+ * @param dataset_id
+ * Identifier of the dataset to query.
+ *
+ * @return a datatype identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Dget_type(long dataset_id) throws HDF5LibraryException {
+ long id = _H5Dget_type(dataset_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Dget_type add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Dget_type(long dataset_id) throws HDF5LibraryException;
+
+ /**
+ * H5Diterate iterates over all the data elements in the memory buffer buf, executing the callback function operator
+ * once for each such data element.
+ *
+ * @param buf
+ * IN/OUT: Pointer to the memory containing the elements to iterate over.
+ * @param buf_type
+ * IN: Buffer datatype identifier.
+ * @param space_id
+ * IN: Dataspace describing memory buffer.
+ * @param op
+ * IN: Callback function to operate on each value.
+ * @param op_data
+ * IN/OUT: Pointer to any user-defined data for use by the operator function.
+ *
+ * @return returns the return value of the first operator that returns a positive value, or zero if all members were
+ * processed with no operator returning non-zero.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - buf is null.
+ **/
+ public synchronized static native int H5Diterate(byte[] buf, long buf_type, long space_id, H5D_iterate_cb op,
+ H5D_iterate_t op_data) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Dopen opens the existing dataset specified by a location identifier and name, loc_id and name, respectively.
+ *
+ * @param loc_id
+ * IN: Location identifier
+ * @param name
+ * IN: Dataset name
+ * @param dapl_id
+ * IN: Identifier of dataset access property list.
+ *
+ * @return a dataset identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public static long H5Dopen(long loc_id, String name, long dapl_id) throws HDF5LibraryException,
+ NullPointerException {
+ long id = _H5Dopen2(loc_id, name, dapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Dopen add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
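+
+ // Illustrative sketch: opening an existing dataset by path with default access properties.
+ // Assumes file_id is a valid file identifier and "/data" exists; the returned id should be
+ // released with H5Dclose.
+ //
+ //     long dset_id = H5.H5Dopen(file_id, "/data", HDF5Constants.H5P_DEFAULT);
+ //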
+
+ /**
+ * H5Dopen2 opens the existing dataset specified by a location identifier and name, loc_id and name, respectively.
+ *
+ * @see public static long H5Dopen(long loc_id, String name, long dapl_id)
+ **/
+ private synchronized static native long _H5Dopen2(long loc_id, String name, long dapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the application
+ * memory buffer buf.
+ *
+ * @param dataset_id
+ * Identifier of the dataset read from.
+ * @param mem_type_id
+ * Identifier of the memory datatype.
+ * @param mem_space_id
+ * Identifier of the memory dataspace.
+ * @param file_space_id
+ * Identifier of the dataset's dataspace in the file.
+ * @param xfer_plist_id
+ * Identifier of a transfer property list for this I/O operation.
+ * @param obj
+ * Buffer to store data read from the file.
+ * @param isCriticalPinning
+ * request lock on data reference.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - data buffer is null.
+ **/
+ public synchronized static native int H5Dread(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, byte[] obj, boolean isCriticalPinning) throws HDF5LibraryException,
+ NullPointerException;
+
+ public synchronized static int H5Dread(long dataset_id, long mem_type_id, long mem_space_id, long file_space_id,
+ long xfer_plist_id, byte[] buf) throws HDF5LibraryException, NullPointerException {
+ return H5Dread(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf, true);
+ }
+
+ public synchronized static int H5Dread(long dataset_id, long mem_type_id, long mem_space_id, long file_space_id,
+ long xfer_plist_id, Object obj) throws HDF5Exception, HDF5LibraryException, NullPointerException {
+ return H5Dread(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, obj, true);
+ }
+
+ /**
+ * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the application
+ * data object.
+ *
+ * @param dataset_id
+ * Identifier of the dataset read from.
+ * @param mem_type_id
+ * Identifier of the memory datatype.
+ * @param mem_space_id
+ * Identifier of the memory dataspace.
+ * @param file_space_id
+ * Identifier of the dataset's dataspace in the file.
+ * @param xfer_plist_id
+ * Identifier of a transfer property list for this I/O operation.
+ * @param obj
+ * Object to store data read from the file.
+ * @param isCriticalPinning
+ * request lock on data reference.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5Exception
+ * - Failure in the data conversion.
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - data object is null.
+ **/
+ public synchronized static int H5Dread(long dataset_id, long mem_type_id, long mem_space_id, long file_space_id,
+ long xfer_plist_id, Object obj, boolean isCriticalPinning) throws HDF5Exception, HDF5LibraryException,
+ NullPointerException {
+ int status = -1;
+ boolean is1D = false;
+
+ Class dataClass = obj.getClass();
+ if (!dataClass.isArray()) {
+ throw (new HDF5JavaException("H5Dread: data is not an array"));
+ }
+
+ String cname = dataClass.getName();
+ is1D = (cname.lastIndexOf('[') == cname.indexOf('['));
+ char dname = cname.charAt(cname.lastIndexOf("[") + 1);
+ log.trace("H5Dread: cname={} is1D={} dname={}", cname, is1D, dname);
+
+ if (is1D && (dname == 'B')) {
+ log.trace("H5Dread_dname_B");
+ status = H5Dread(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, (byte[]) obj,
+ isCriticalPinning);
+ }
+ else if (is1D && (dname == 'S')) {
+ log.trace("H5Dread_dname_S");
+ status = H5Dread_short(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, (short[]) obj,
+ isCriticalPinning);
+ }
+ else if (is1D && (dname == 'I')) {
+ log.trace("H5Dread_dname_I");
+ status = H5Dread_int(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, (int[]) obj,
+ isCriticalPinning);
+ }
+ else if (is1D && (dname == 'J')) {
+ log.trace("H5Dread_dname_J");
+ status = H5Dread_long(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, (long[]) obj);
+ }
+ else if (is1D && (dname == 'F')) {
+ log.trace("H5Dread_dname_F");
+ status = H5Dread_float(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, (float[]) obj,
+ isCriticalPinning);
+ }
+ else if (is1D && (dname == 'D')) {
+ log.trace("H5Dread_dname_D");
+ status = H5Dread_double(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+ (double[]) obj, isCriticalPinning);
+ }
+ else if (H5.H5Tequal(mem_type_id, HDF5Constants.H5T_STD_REF_DSETREG)) {
+ log.trace("H5Dread_reg_ref");
+ status = H5Dread_reg_ref(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+ (String[]) obj);
+ }
+ else if (is1D && (dataClass.getComponentType() == String.class)) {
+ log.trace("H5Dread_string type");
+ if (H5.H5Tis_variable_str(mem_type_id)) {
+ status = H5Dread_VLStrings(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, (Object[]) obj);
+ }
+ else {
+ status = H5Dread_string(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+ (String[]) obj);
+ }
+ }
+ else {
+ // Create a byte buffer to receive the data before converting it into a Java array
+ HDFArray theArray = new HDFArray(obj);
+ byte[] buf = theArray.emptyBytes();
+ log.trace("H5Dread_else");
+
+ // will raise exception if read fails
+ status = H5Dread(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf,
+ isCriticalPinning);
+ if (status >= 0) {
+ // convert the data into a Java array
+ obj = theArray.arrayify(buf);
+ }
+
+ // clean up these: assign 'null' as hint to gc()
+ buf = null;
+ theArray = null;
+ }
+
+ return status;
+ }
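+
+ // Illustrative sketch: reading an entire dataset through the Object overload above. A 1-D int[]
+ // takes the fast primitive path, while a 2-D int[][] falls through to the HDFArray conversion
+ // branch. Assumes a 4x6 dataset of native ints; error handling omitted.
+ //
+ //     int[][] data = new int[4][6];
+ //     H5.H5Dread(dset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ //             HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, data);
+ //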
+
+ public synchronized static native int H5Dread_double(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, double[] buf, boolean isCriticalPinning)
+ throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static int H5Dread_double(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, double[] buf) throws HDF5LibraryException, NullPointerException {
+ return H5Dread_double(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf, true);
+ }
+
+ public synchronized static native int H5Dread_float(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, float[] buf, boolean isCriticalPinning)
+ throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static int H5Dread_float(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, float[] buf) throws HDF5LibraryException, NullPointerException {
+ return H5Dread_float(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf, true);
+ }
+
+ public synchronized static native int H5Dread_int(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, int[] buf, boolean isCriticalPinning) throws HDF5LibraryException,
+ NullPointerException;
+
+ public synchronized static int H5Dread_int(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, int[] buf) throws HDF5LibraryException, NullPointerException {
+ return H5Dread_int(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf, true);
+ }
+
+ public synchronized static native int H5Dread_long(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, long[] buf, boolean isCriticalPinning) throws HDF5LibraryException,
+ NullPointerException;
+
+ public synchronized static int H5Dread_long(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, long[] buf) throws HDF5LibraryException, NullPointerException {
+ return H5Dread_long(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf, true);
+ }
+
+ public synchronized static native int H5Dread_reg_ref(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, String[] buf) throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Dread_reg_ref_data(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, String[] buf) throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Dread_short(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, short[] buf, boolean isCriticalPinning)
+ throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static int H5Dread_short(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, short[] buf) throws HDF5LibraryException, NullPointerException {
+ return H5Dread_short(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf, true);
+ }
+
+ public synchronized static native int H5Dread_string(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, String[] buf) throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Dread_VLStrings(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, Object[] buf) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Dset_extent sets the current dimensions of the chunked dataset dset_id to the sizes specified in size.
+ *
+ * @param dset_id
+ * IN: Chunked dataset identifier.
+ * @param size
+ * IN: Array containing the new magnitude of each dimension of the dataset.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - size is null.
+ **/
+ public synchronized static native void H5Dset_extent(long dset_id, long size[]) throws HDF5LibraryException,
+ NullPointerException;
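+
+ // Illustrative sketch: growing a chunked dataset with an unlimited first dimension from 10 to
+ // 20 rows. Assumes the dataset was created with maxdims that allow the new size.
+ //
+ //     long[] newSize = { 20, 6 };
+ //     H5.H5Dset_extent(dset_id, newSize);
+ //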
+
+ /**
+ * H5Dvlen_get_buf_size determines the number of bytes required to store the VL data from the dataset, using the
+ * space_id for the selection in the dataset on disk and the type_id for the memory representation of the VL data in
+ * memory.
+ *
+ * @param dset_id
+ * IN: Identifier of the dataset read from.
+ * @param type_id
+ * IN: Identifier of the datatype.
+ * @param space_id
+ * IN: Identifier of the dataspace.
+ *
+ * @return the size in bytes of the memory buffer required to store the VL data.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Dvlen_get_buf_size(long dset_id, long type_id, long space_id)
+ throws HDF5LibraryException;
+
+ /**
+ * H5Dvlen_reclaim reclaims buffer used for VL data.
+ *
+ * @param type_id
+ * Identifier of the datatype.
+ * @param space_id
+ * Identifier of the dataspace.
+ * @param xfer_plist_id
+ * Identifier of a transfer property list for this I/O operation.
+ * @param buf
+ * Buffer with data to be reclaimed.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - buf is null.
+ **/
+ public synchronized static native int H5Dvlen_reclaim(long type_id, long space_id, long xfer_plist_id, byte[] buf)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application memory buffer
+ * buf into the file.
+ *
+ * @param dataset_id
+ * Identifier of the dataset read from.
+ * @param mem_type_id
+ * Identifier of the memory datatype.
+ * @param mem_space_id
+ * Identifier of the memory dataspace.
+ * @param file_space_id
+ * Identifier of the dataset's dataspace in the file.
+ * @param xfer_plist_id
+ * Identifier of a transfer property list for this I/O operation.
+ * @param buf
+ * Buffer with data to be written to the file.
+ * @param isCriticalPinning
+ * request lock on data reference.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - buf is null.
+ **/
+ public synchronized static native int H5Dwrite(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, byte[] buf, boolean isCriticalPinning) throws HDF5LibraryException,
+ NullPointerException;
+
+ public synchronized static int H5Dwrite(long dataset_id, long mem_type_id, long mem_space_id, long file_space_id,
+ long xfer_plist_id, byte[] buf) throws HDF5LibraryException, NullPointerException {
+ return H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf, true);
+ }
+
+ public synchronized static int H5Dwrite(long dataset_id, long mem_type_id, long mem_space_id, long file_space_id,
+ long xfer_plist_id, Object obj) throws HDF5Exception, HDF5LibraryException, NullPointerException {
+ return H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, obj, true);
+ }
+
+ /**
+ * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application memory data
+ * object into the file.
+ *
+ * @param dataset_id
+ * Identifier of the dataset read from.
+ * @param mem_type_id
+ * Identifier of the memory datatype.
+ * @param mem_space_id
+ * Identifier of the memory dataspace.
+ * @param file_space_id
+ * Identifier of the dataset's dataspace in the file.
+ * @param xfer_plist_id
+ * Identifier of a transfer property list for this I/O operation.
+ * @param obj
+ * Object with data to be written to the file.
+ * @param isCriticalPinning
+ * request lock on data reference.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5Exception
+ * - Failure in the data conversion.
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - data object is null.
+ **/
+ public synchronized static int H5Dwrite(long dataset_id, long mem_type_id, long mem_space_id, long file_space_id,
+ long xfer_plist_id, Object obj, boolean isCriticalPinning) throws HDF5Exception, HDF5LibraryException,
+ NullPointerException {
+ int status = -1;
+ boolean is1D = false;
+
+ Class dataClass = obj.getClass();
+ if (!dataClass.isArray()) {
+ throw (new HDF5JavaException("H5Dwrite: data is not an array"));
+ }
+
+ String cname = dataClass.getName();
+ is1D = (cname.lastIndexOf('[') == cname.indexOf('['));
+ char dname = cname.charAt(cname.lastIndexOf("[") + 1);
+
+ if (is1D && (dname == 'B')) {
+ status = H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, (byte[]) obj,
+ isCriticalPinning);
+ }
+ else if (is1D && (dname == 'S')) {
+ status = H5Dwrite_short(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, (short[]) obj,
+ isCriticalPinning);
+ }
+ else if (is1D && (dname == 'I')) {
+ status = H5Dwrite_int(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, (int[]) obj,
+ isCriticalPinning);
+ }
+ else if (is1D && (dname == 'J')) {
+ status = H5Dwrite_long(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, (long[]) obj,
+ isCriticalPinning);
+ }
+ else if (is1D && (dname == 'F')) {
+ status = H5Dwrite_float(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, (float[]) obj,
+ isCriticalPinning);
+ }
+ else if (is1D && (dname == 'D')) {
+ status = H5Dwrite_double(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+ (double[]) obj, isCriticalPinning);
+ }
+ else if (is1D && (dataClass.getComponentType() == String.class)) {
+ log.trace("H5Dwrite_string type");
+ if (H5.H5Tis_variable_str(mem_type_id)) {
+ status = H5Dwrite_VLStrings(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, (Object[]) obj);
+ }
+ else {
+ status = H5Dwrite_string(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id,
+ (String[]) obj);
+ }
+ }
+ else {
+ HDFArray theArray = new HDFArray(obj);
+ byte[] buf = theArray.byteify();
+
+ // will raise exception on error
+ status = H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf,
+ isCriticalPinning);
+
+ // clean up these: assign 'null' as hint to gc()
+ buf = null;
+ theArray = null;
+ }
+
+ return status;
+ }
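+
+ // Illustrative sketch: writing a 2-D double array through the Object overload above; the array
+ // is flattened to a byte buffer by HDFArray before the native call. Assumes the dataset and its
+ // dataspace match the 2x3 shape; error handling omitted.
+ //
+ //     double[][] values = { { 1.0, 2.0, 3.0 }, { 4.0, 5.0, 6.0 } };
+ //     H5.H5Dwrite(dset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
+ //             HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, values);
+ //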
+
+ public synchronized static native int H5Dwrite_double(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, double[] buf, boolean isCriticalPinning)
+ throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static int H5Dwrite_double(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, double[] buf) throws HDF5LibraryException, NullPointerException {
+ return H5Dwrite_double(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf, true);
+ }
+
+ public synchronized static native int H5Dwrite_float(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, float[] buf, boolean isCriticalPinning)
+ throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static int H5Dwrite_float(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, float[] buf) throws HDF5LibraryException, NullPointerException {
+ return H5Dwrite_float(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf, true);
+ }
+
+ public synchronized static native int H5Dwrite_int(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, int[] buf, boolean isCriticalPinning) throws HDF5LibraryException,
+ NullPointerException;
+
+ public synchronized static int H5Dwrite_int(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, int[] buf) throws HDF5LibraryException, NullPointerException {
+ return H5Dwrite_int(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf, true);
+ }
+
+ public synchronized static native int H5Dwrite_long(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, long[] buf, boolean isCriticalPinning) throws HDF5LibraryException,
+ NullPointerException;
+
+ public synchronized static int H5Dwrite_long(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, long[] buf) throws HDF5LibraryException, NullPointerException {
+ return H5Dwrite_long(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf, true);
+ }
+
+ public synchronized static native int H5Dwrite_short(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, short[] buf, boolean isCriticalPinning)
+ throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static int H5Dwrite_short(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, short[] buf) throws HDF5LibraryException, NullPointerException {
+ return H5Dwrite_short(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf, true);
+ }
+
+ public synchronized static native int H5Dwrite_string(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, String[] buf) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Dwrite_VLStrings writes a (partial) variable length String dataset, specified by its identifier dataset_id, from
+ * the application memory buffer buf into the file.
+ *
+ * ---- contributed by Rosetta Biosoftware
+ *
+ * @param dataset_id
+ * Identifier of the dataset read from.
+ * @param mem_type_id
+ * Identifier of the memory datatype.
+ * @param mem_space_id
+ * Identifier of the memory dataspace.
+ * @param file_space_id
+ * Identifier of the dataset's dataspace in the file.
+ * @param xfer_plist_id
+ * Identifier of a transfer property list for this I/O operation.
+ * @param buf
+ * Buffer with data to be written to the file.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - buf is null.
+ **/
+
+ public synchronized static native int H5Dwrite_VLStrings(long dataset_id, long mem_type_id, long mem_space_id,
+ long file_space_id, long xfer_plist_id, Object[] buf) throws HDF5LibraryException, NullPointerException;
+
+ // /////// unimplemented ////////
+ // H5_DLL herr_t H5Ddebug(hid_t dset_id);
+ // herr_t H5Dgather(hid_t src_space_id, const void *src_buf, hid_t type_id,
+ // size_t dst_buf_size, void *dst_buf, H5D_gather_func_t op, void *op_data);
+ // herr_t H5Dscatter(H5D_scatter_func_t op, void *op_data, hid_t type_id, hid_t dst_space_id, void *dst_buf);
+
+ // ////////////////////////////////////////////////////////////
+ // //
+ // H5E: Error Stack //
+ // //
+ // ////////////////////////////////////////////////////////////
+
+ /**
+ * H5Eauto_is_v2 determines whether the error auto reporting function for an error stack conforms to the H5E_auto2_t
+ * typedef or the H5E_auto1_t typedef.
+ *
+ * @param stack_id
+ * IN: Error stack identifier.
+ *
+ * @return boolean true if the error stack conforms to H5E_auto2_t and false if it conforms to H5E_auto1_t.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native boolean H5Eauto_is_v2(long stack_id) throws HDF5LibraryException;
+
+ /**
+ * H5Eclear clears the error stack for the current thread. H5Eclear can fail if there are problems initializing the
+ * library.
+ * <p>
+ * This may be used by exception handlers to assure that the error condition in the HDF-5 library has been reset.
+ *
+ * @return Returns a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static int H5Eclear() throws HDF5LibraryException {
+ H5Eclear2(HDF5Constants.H5E_DEFAULT);
+ return 0;
+ }
+
+ /**
+ * H5Eclear clears the error stack specified by estack_id, or, if estack_id is set to H5E_DEFAULT, the error stack
+ * for the current thread.
+ *
+ * @param stack_id
+ * IN: Error stack identifier.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static void H5Eclear(long stack_id) throws HDF5LibraryException {
+ H5Eclear2(stack_id);
+ }
+
+ /**
+ * H5Eclear2 clears the error stack specified by estack_id, or, if estack_id is set to H5E_DEFAULT, the error stack
+ * for the current thread.
+ *
+ * @param stack_id
+ * IN: Error stack identifier.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Eclear2(long stack_id) throws HDF5LibraryException;
+
+ /**
+ * H5Eclose_msg closes an error message identifier, which can be either a major or minor message.
+ *
+ * @param err_id
+ * IN: Error message identifier.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Eclose_msg(long err_id) throws HDF5LibraryException;
+
+ /**
+ * H5Eclose_stack closes the object handle for an error stack and releases its resources.
+ *
+ * @param stack_id
+ * IN: Error stack identifier.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Eclose_stack(long stack_id) throws HDF5LibraryException;
+
+ /**
+ * H5Ecreate_msg adds an error message to an error class defined by client library or application program.
+ *
+ * @param cls_id
+ * IN: Error class identifier.
+ * @param msg_type
+ * IN: The type of the error message.
+ * @param msg
+ * IN: The error message.
+ *
+ * @return a message identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - msg is null.
+ **/
+ public synchronized static native long H5Ecreate_msg(long cls_id, int msg_type, String msg)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Ecreate_stack creates a new empty error stack and returns the new stack's identifier.
+ *
+ * @return an error stack identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Ecreate_stack() throws HDF5LibraryException;
+
+ /**
+ * H5Eget_class_name retrieves the name of the error class specified by the class identifier.
+ *
+ * @param class_id
+ * IN: Error class identifier.
+ *
+ * @return the name of the error class
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native String H5Eget_class_name(long class_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Eget_current_stack copies the current error stack and returns an error stack identifier for the new copy.
+ *
+ * @return an error stack identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Eget_current_stack() throws HDF5LibraryException;
+
+ /**
+ * H5Eset_current_stack replaces the content of the current error stack with a copy of the content of the error
+ * stack specified by estack_id.
+ *
+ * @param stack_id
+ * IN: Error stack identifier.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Eset_current_stack(long stack_id) throws HDF5LibraryException;
+
+ /**
+ * H5Eget_msg retrieves the error message including its length and type.
+ *
+ * @param msg_id
+ * IN: Error message identifier.
+ * @param type_list
+ * OUT: The type of the error message. Valid values are H5E_MAJOR and H5E_MINOR.
+ *
+ * @return the error message
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native String H5Eget_msg(long msg_id, int[] type_list) throws HDF5LibraryException;
+
+ /**
+ * H5Eget_num retrieves the number of error records in the error stack specified by estack_id (including major,
+ * minor messages and description).
+ *
+ * @param stack_id
+ * IN: Error stack identifier.
+ *
+ * @return the number of error messages
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Eget_num(long stack_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Eprint2 prints the error stack specified by estack_id on the specified stream, stream.
+ *
+ * @param stack_id
+ * IN: Error stack identifier. If the identifier is H5E_DEFAULT, the current error stack will be printed.
+ * @param stream
+ * IN: File pointer, or stderr if null.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Eprint2(long stack_id, Object stream) throws HDF5LibraryException;
+
+ /**
+ * H5Epop deletes the number of error records specified in count from the top of the error stack specified by
+ * estack_id (including major, minor messages and description).
+ *
+ * @param stack_id
+ * IN: Error stack identifier.
+ * @param count
+ * IN: The number of error records to be deleted from the top of the error stack.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Epop(long stack_id, long count) throws HDF5LibraryException;
+
+
+ /**
+ * H5Epush2 pushes a new error record onto the error stack specified by estack_id.
+ *
+ * @param stack_id
+ * IN: Error stack identifier.
+ * @param file
+ * IN: Name of the file in which the error was detected.
+ * @param func
+ * IN: Name of the function in which the error was detected.
+ * @param line
+ * IN: Line number within the file at which the error was detected.
+ * @param cls_id
+ * IN: Error class identifier.
+ * @param maj_id
+ * IN: Major error identifier.
+ * @param min_id
+ * IN: Minor error identifier.
+ * @param msg
+ * IN: Error description string.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - file, func, or msg is null.
+ **/
+ public static void H5Epush(long stack_id, String file, String func, int line,
+ long cls_id, long maj_id, long min_id, String msg) throws HDF5LibraryException, NullPointerException
+ {
+ H5Epush2(stack_id, file, func, line, cls_id, maj_id, min_id, msg);
+ }
+ public synchronized static native void H5Epush2(long stack_id, String file, String func, int line,
+ long cls_id, long maj_id, long min_id, String msg) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Eregister_class registers a client library or application program to the HDF5 error API so that the client
+ * library or application program can report errors together with the HDF5 library.
+ *
+ * @param cls_name
+ * IN: Name of the error class.
+ * @param lib_name
+ * IN: Name of the client library or application to which the error class belongs.
+ * @param version
+ * IN: Version of the client library or application to which the error class belongs.
+ *
+ * @return a class identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native long H5Eregister_class(String cls_name, String lib_name, String version)
+ throws HDF5LibraryException, NullPointerException;
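+
+ // Illustrative sketch: registering an application error class, creating major and minor
+ // messages, and pushing a record onto the default error stack. The class name, version string,
+ // and message texts are made up for the example; the ids should later be released with
+ // H5Eclose_msg and H5Eunregister_class.
+ //
+ //     long cls_id = H5.H5Eregister_class("MyAppError", "myapp", "1.0");
+ //     long maj_id = H5.H5Ecreate_msg(cls_id, HDF5Constants.H5E_MAJOR, "I/O failure");
+ //     long min_id = H5.H5Ecreate_msg(cls_id, HDF5Constants.H5E_MINOR, "checksum mismatch");
+ //     H5.H5Epush(HDF5Constants.H5E_DEFAULT, "MyApp.java", "readBlock", 42,
+ //             cls_id, maj_id, min_id, "block 7 failed verification");
+ //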
+
+ /**
+ * H5Eunregister_class removes the error class specified by class_id.
+ *
+ * @param class_id
+ * IN: Error class identifier.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Eunregister_class(long class_id) throws HDF5LibraryException;
+
+ /**
+ * H5Ewalk walks the error stack specified by estack_id for the current thread and calls the
+ * function specified in func for each error along the way.
+ *
+ * @param stack_id
+ * IN: Error stack identifier.
+ * @param direction
+ * IN: Direction in which the error stack is to be walked.
+ * @param func
+ * IN: Function to be called for each error encountered.
+ * @param client_data
+ * IN: Data to be passed with func.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - func is null.
+ **/
+ public static void H5Ewalk(long stack_id, long direction, H5E_walk_cb func, H5E_walk_t client_data) throws HDF5LibraryException, NullPointerException
+ {
+ H5Ewalk2(stack_id, direction, func, client_data);
+ }
+ public synchronized static native void H5Ewalk2(long stack_id, long direction, H5E_walk_cb func, H5E_walk_t client_data)
+ throws HDF5LibraryException, NullPointerException;
+
+ // /////// unimplemented ////////
+ // public interface H5E_auto2_t extends Callback
+ // {
+ // int callback(int estack, Pointer client_data);
+ // }
+
+ // int H5Eget_auto(long estack_id, H5E_auto2_t func, PointerByReference client_data);
+ // {
+ // return H5Eget_auto2(estack_id, func, client_data);
+ // }
+ // int H5Eget_auto2(long estack_id, H5E_auto2_t func, PointerByReference client_data);
+
+ // int H5Eset_auto(long estack_id, H5E_auto2_t func, Pointer client_data);
+ // {
+ // return H5Eset_auto2(estack_id, func, client_data);
+ // }
+ // int H5Eset_auto2(long estack_id, H5E_auto2_t func, Pointer client_data);
+
+ // public static void H5Epush(long err_stack, String file, String func, int line,
+ // long cls_id, long maj_id, long min_id, String msg, ...)
+ // {
+ // H5Epush2(err_stack, file, func, line, cls_id, maj_id, min_id, msg, ...);
+ // }
+ // public synchronized static native void H5Epush2(long err_stack, String file, String func, int line,
+ // long cls_id, long maj_id, long min_id, String msg, ...);
+
+ // ////////////////////////////////////////////////////////////
+ // //
+ // H5F: File Interface Functions //
+ // //
+ // ////////////////////////////////////////////////////////////
+
+ /**
+ * H5Fclose terminates access to an HDF5 file.
+ *
+ * @param file_id
+ * Identifier of a file to terminate access to.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static int H5Fclose(long file_id) throws HDF5LibraryException {
+ if (file_id < 0)
+ return 0; // throw new HDF5LibraryException("Negative ID");
+
+ log.trace("OPEN_IDS: H5Fclose remove {}", file_id);
+ OPEN_IDS.remove(file_id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ return _H5Fclose(file_id);
+ }
+
+ private synchronized static native int _H5Fclose(long file_id) throws HDF5LibraryException;
+
+ /**
+ * H5Fopen opens an existing file and is the primary function for accessing existing HDF5 files.
+ *
+ * @param name
+ * Name of the file to access.
+ * @param flags
+ * File access flags.
+ * @param access_id
+ * Identifier for the file access properties list.
+ *
+ * @return a file identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public static long H5Fopen(String name, int flags, long access_id) throws HDF5LibraryException,
+ NullPointerException {
+ long id = _H5Fopen(name, flags, access_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Fopen add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
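+
+ // Illustrative sketch: opening an existing file read-only with default access properties.
+ // The file name is arbitrary for the example; the returned id should be released with H5Fclose.
+ //
+ //     long file_id = H5.H5Fopen("example.h5", HDF5Constants.H5F_ACC_RDONLY,
+ //             HDF5Constants.H5P_DEFAULT);
+ //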
+
+ private synchronized static native long _H5Fopen(String name, int flags, long access_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Freopen reopens an HDF5 file.
+ *
+ * @param file_id
+ * Identifier of a file to terminate and reopen access to.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @return a new file identifier if successful
+ **/
+ public static long H5Freopen(long file_id) throws HDF5LibraryException {
+ long id = _H5Freopen(file_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Freopen add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Freopen(long file_id) throws HDF5LibraryException;
+
+ /**
+ * H5Fcreate is the primary function for creating HDF5 files.
+ *
+ * @param name
+ * Name of the file to access.
+ * @param flags
+ * File access flags. Possible values include:
+ * <UL>
+ * <LI>
+ * H5F_ACC_RDWR Allow read and write access to file.</LI>
+ * <LI>
+ * H5F_ACC_RDONLY Allow read-only access to file.</LI>
+ * <LI>
+ * H5F_ACC_TRUNC Truncate file, if it already exists, erasing all data previously stored in the file.</LI>
+ * <LI>
+ * H5F_ACC_EXCL Fail if file already exists.</LI>
+ * <LI>
+ * H5P_DEFAULT Apply default file access and creation properties.</LI>
+ * </UL>
+ *
+ * @param create_id
+ * File creation property list identifier, used when modifying default file meta-data. Use H5P_DEFAULT
+ * for default access properties.
+ * @param access_id
+ * File access property list identifier. If parallel file access is desired, this is a collective call
+ * according to the communicator stored in the access_id (not supported in Java). Use H5P_DEFAULT for
+ * default access properties.
+ *
+ * @return a file identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public static long H5Fcreate(String name, int flags, long create_id, long access_id) throws HDF5LibraryException,
+ NullPointerException {
+ long id = _H5Fcreate(name, flags, create_id, access_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Fcreate add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
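+
+ // Illustrative sketch: creating (or truncating) a file with default creation and access
+ // property lists. The file name is arbitrary for the example; error handling omitted.
+ //
+ //     long file_id = H5.H5Fcreate("example.h5", HDF5Constants.H5F_ACC_TRUNC,
+ //             HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ //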
+
+ private synchronized static native long _H5Fcreate(String name, int flags, long create_id, long access_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Fflush causes all buffers associated with a file or object to be immediately flushed (written) to disk without
+ * removing the data from the (memory) cache.
+ * <P>
+ * After this call completes, the file (or object) is in a consistent state and all data written to date is assured
+ * to be permanent.
+ *
+ * @param object_id
+ * Identifier of object used to identify the file. <b>object_id</b> can be any object associated with the
+ * file, including the file itself, a dataset, a group, an attribute, or a named data type.
+ * @param scope
+ * specifies the scope of the flushing action, in the case that the HDF-5 file is not a single physical
+ * file.
+ * <P>
+ * Valid values are:
+ * <UL>
+ * <LI>
+ * H5F_SCOPE_GLOBAL Flushes the entire virtual file.</LI>
+ * <LI>
+ * H5F_SCOPE_LOCAL Flushes only the specified file.</LI>
+ * </UL>
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Fflush(long object_id, int scope) throws HDF5LibraryException;
+
+ /**
+ * H5Fget_access_plist returns the file access property list identifier of the specified file.
+ *
+ * @param file_id
+ * Identifier of file to get access property list of
+ *
+ * @return a file access property list identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Fget_access_plist(long file_id) throws HDF5LibraryException {
+ long id = _H5Fget_access_plist(file_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Fget_access_plist add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Fget_access_plist(long file_id) throws HDF5LibraryException;
+
+ /**
+ * H5Fget_create_plist returns a file creation property list identifier identifying the creation properties used to
+ * create this file.
+ *
+ * @param file_id
+ * Identifier of the file to get creation property list
+ *
+ * @return a file creation property list identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Fget_create_plist(long file_id) throws HDF5LibraryException {
+ long id = _H5Fget_create_plist(file_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Fget_create_plist add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Fget_create_plist(long file_id) throws HDF5LibraryException;
+
+ public synchronized static native long H5Fget_filesize(long file_id) throws HDF5LibraryException;
+
+ /**
+ * H5Fget_freespace returns the amount of space that is unused by any objects in the file.
+ *
+ * @param file_id
+ * IN: File identifier for a currently-open HDF5 file
+ *
+ * @return the amount of free space in the file
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Fget_freespace(long file_id) throws HDF5LibraryException;
+
+ /**
+ * H5Fget_intent retrieves the intended access mode flag passed with H5Fopen when the file was opened.
+ *
+ * @param file_id
+ * IN: File identifier for a currently-open HDF5 file
+ *
+ * @return the intended access mode flag, as originally passed with H5Fopen.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Fget_intent(long file_id) throws HDF5LibraryException;
+
+ /**
+ * H5Fget_mdc_hit_rate queries the metadata cache of the target file to obtain its hit rate (cache hits / (cache
+ * hits + cache misses)) since the last time hit rate statistics were reset.
+ *
+ * @param file_id
+ * IN: Identifier of the target file.
+ *
+ * @return the metadata cache hit rate.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native double H5Fget_mdc_hit_rate(long file_id) throws HDF5LibraryException;
+
+ /**
+ * H5Fget_mdc_size queries the metadata cache of the target file for the desired size information.
+ *
+ * @param file_id
+ * IN: Identifier of the target file.
+ * @param metadata_cache
+ * OUT: Current metadata cache information
+ * <ul>
+ * <li>metadata_cache[0] = max_size_ptr // current cache maximum size</li>
+ * <li>metadata_cache[1] = min_clean_size_ptr // current cache minimum clean size</li>
+ * <li>metadata_cache[2] = cur_size_ptr // current cache size</li>
+ * </ul>
+ *
+ * @return current number of entries in the cache
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - metadata_cache is null.
+ **/
+ public synchronized static native int H5Fget_mdc_size(long file_id, long[] metadata_cache)
+ throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+ /**
+ * H5Fget_name retrieves the name of the file to which the object obj_id belongs.
+ *
+ * @param obj_id
+ * IN: Identifier of the object for which the associated filename is sought.
+ *
+ * @return the filename.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native String H5Fget_name(long obj_id) throws HDF5LibraryException;
+
+ /**
+ * H5Fget_obj_count returns the number of open object identifiers for the file.
+ *
+ * @param file_id
+ * IN: File identifier for a currently-open HDF5 file
+ * @param types
+ * IN: Type of object for which identifiers are to be returned.
+ * <ul>
+ * <li>H5F_OBJ_FILE Files only</li>
+ * <li>H5F_OBJ_DATASET Datasets only</li>
+ * <li>H5F_OBJ_GROUP Groups only</li>
+ * <li>H5F_OBJ_DATATYPE Named datatypes only</li>
+ * <li>H5F_OBJ_ATTR Attributes only</li>
+ * <li>H5F_OBJ_ALL All of the above</li>
+ * <li>H5F_OBJ_LOCAL Restrict search to objects opened through current file identifier.</li>
+ * </ul>
+ *
+ * @return the number of open objects.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Fget_obj_count(long file_id, int types) throws HDF5LibraryException;
+
+ /**
+ * H5Fget_obj_ids returns the list of identifiers for all open HDF5 objects fitting the specified criteria.
+ *
+ * @param file_id
+ * IN: File identifier for a currently-open HDF5 file
+ * @param types
+ * IN: Type of object for which identifiers are to be returned.
+ * @param max_objs
+ * IN: Maximum number of object identifiers to place into obj_id_list.
+ * @param obj_id_list
+ * OUT: Pointer to the returned list of open object identifiers.
+ *
+ * @return the number of objects placed into obj_id_list.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - obj_id_list is null.
+ **/
+ public synchronized static native long H5Fget_obj_ids(long file_id, int types, long max_objs, long[] obj_id_list)
+ throws HDF5LibraryException, NullPointerException;
+
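+ // Illustrative usage sketch (comment only, not part of the wrapper API): listing every
+ // object that is still open in a file. fid is a hypothetical open file identifier.
+ //
+ // long count = H5.H5Fget_obj_count(fid, HDF5Constants.H5F_OBJ_ALL);
+ // long[] objs = new long[(int) count];
+ // H5.H5Fget_obj_ids(fid, HDF5Constants.H5F_OBJ_ALL, count, objs);
+ // for (long oid : objs)
+ //     System.out.println(H5.H5Iget_type(oid) + ": " + H5.H5Fget_name(oid));
+ 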
+ /**
+ * H5Fis_hdf5 determines whether a file is in the HDF5 format.
+ *
+ * @param name
+ * File name to check format.
+ *
+ * @return true if the file is an HDF5 file, false if not.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native boolean H5Fis_hdf5(String name) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Fmount mounts the file specified by child_id onto the group specified by loc_id and name using the mount
+ * properties plist_id.
+ *
+ * @param loc_id
+ * The identifier for the group onto which the file specified by child_id is to be mounted.
+ * @param name
+ * The name of the group onto which the file specified by child_id is to be mounted.
+ * @param child_id
+ * The identifier of the file to be mounted.
+ * @param plist_id
+ * The identifier of the property list to be used.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native int H5Fmount(long loc_id, String name, long child_id, long plist_id)
+ throws HDF5LibraryException, NullPointerException;
+
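+ // Illustrative usage sketch (comment only, not part of the wrapper API): mounting a child
+ // file under a group of a parent file and unmounting it again. The file names and the
+ // mount-point group "/mnt" (which must already exist in the parent) are hypothetical.
+ //
+ // long parent = H5.H5Fopen("parent.h5", HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ // long child = H5.H5Fopen("child.h5", HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ // H5.H5Fmount(parent, "/mnt", child, HDF5Constants.H5P_DEFAULT);
+ // // ... objects of child.h5 are now reachable as /mnt/... in the parent ...
+ // H5.H5Funmount(parent, "/mnt");
+ // H5.H5Fclose(child);
+ // H5.H5Fclose(parent);
+ 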
+ /**
+ * Given a mount point, H5Funmount disassociates the mount point's file from the file mounted there.
+ *
+ * @param loc_id
+ * The identifier for the location at which the specified file is to be unmounted.
+ * @param name
+ * The name of the file to be unmounted.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native int H5Funmount(long loc_id, String name) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+ * H5Freset_mdc_hit_rate_stats resets the hit rate statistics counters in the metadata cache associated with the
+ * specified file.
+ *
+ * @param file_id
+ * IN: Identifier of the target file.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Freset_mdc_hit_rate_stats(long file_id) throws HDF5LibraryException;
+
+ /**
+ * H5Fget_info returns global information for the file associated with the
+ * object identifier obj_id.
+ *
+ * @param obj_id IN: Object identifier for any object in the file.
+ *
+ * @return a buffer (H5F_info2_t) containing the current "global" information about the file
+ *
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+ public synchronized static native H5F_info2_t H5Fget_info(long obj_id) throws HDF5LibraryException;
+
+ /**
+ * H5Fclear_elink_file_cache evicts all the cached child files in the specified file's external file
+ * cache, causing them to be closed if there is nothing else holding them open.
+ *
+ * @param file_id
+ * IN: Identifier of the target file.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Fclear_elink_file_cache(long file_id) throws HDF5LibraryException;
+
+ // /////// unimplemented ////////
+ // ssize_t H5Fget_file_image(hid_t file_id, void * buf_ptr, size_t buf_len);
+ // ssize_t H5Fget_free_sections(hid_t file_id, H5F_mem_t type, size_t nsects, H5F_sect_info_t *sect_info/*out*/);
+
+ // /**
+ // * H5Fget_vfd_handle returns a pointer to the file handle from the
+ // low-level file driver
+ // * currently being used by the HDF5 library for file I/O.
+ // *
+ // * @param file_id IN: Identifier of the file to be queried.
+ // * @param fapl IN: File access property list identifier.
+ // *
+ // * @return a pointer to the file handle being used by the low-level
+ // virtual file driver.
+ // *
+ // * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ // **/
+ // public synchronized static native Pointer file_handle
+ // H5Fget_vfd_handle(int file_id, int fapl)
+ // throws HDF5LibraryException;
+
+ // /**
+ // * H5Fget_mdc_config loads the current metadata cache configuration into
+ // * the instance of H5AC_cache_config_t pointed to by the config_ptr
+ // parameter.
+ // *
+ // * @param file_id IN: Identifier of the target file
+ // * @param config_ptr IN/OUT: Pointer to the instance of
+ // H5AC_cache_config_t in which the current metadata cache configuration is to be reported.
+ // *
+ // * @return none
+ // *
+ // * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ // * @exception NullPointerException - config_ptr is null.
+ // **/
+ // public synchronized static native void H5Fget_mdc_config(int file_id, H5AC_cache_config_t config_ptr)
+ // throws HDF5LibraryException, NullPointerException;
+
+ // /**
+ // * H5Fset_mdc_config attempts to configure the file's metadata cache
+ // according to the configuration supplied.
+ // *
+ // * @param file_id IN: Identifier of the target file
+ // * @param config_ptr IN: Pointer to the instance of H5AC_cache_config_t
+ // containing the desired configuration.
+ // *
+ // * @return none
+ // *
+ // * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ // * @exception NullPointerException - config_ptr is null.
+ // **/
+ // public synchronized static native int H5Fset_mdc_config(int file_id, H5AC_cache_config_t config_ptr)
+ // throws HDF5LibraryException, NullPointerException;
+
+ // ////////////////////////////////////////////////////////////
+ // //
+ // H5G: Group Interface Functions //
+ // //
+ // ////////////////////////////////////////////////////////////
+
+ /**
+ * H5Gclose releases resources used by a group which was opened by a call to H5Gcreate() or H5Gopen().
+ *
+ * @param group_id
+ * Group identifier to release.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static int H5Gclose(long group_id) throws HDF5LibraryException {
+ if (group_id < 0)
+ return 0; // throw new HDF5LibraryException("Negative ID");;
+
+ log.trace("OPEN_IDS: H5Gclose remove {}", group_id);
+ OPEN_IDS.remove(group_id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ return _H5Gclose(group_id);
+ }
+
+ private synchronized static native int _H5Gclose(long group_id) throws HDF5LibraryException;
+
+ /**
+ * H5Gcreate creates a new group with the specified name at the specified location, loc_id.
+ *
+ * @param loc_id
+ * IN: The file or group identifier.
+ * @param name
+ * IN: The absolute or relative name of the new group.
+ * @param lcpl_id
+ * IN: Identifier of link creation property list.
+ * @param gcpl_id
+ * IN: Identifier of group creation property list.
+ * @param gapl_id
+ * IN: Identifier of group access property list. (No group access properties have been implemented at
+ * this time; use H5P_DEFAULT.)
+ *
+ * @return a valid group identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public static long H5Gcreate(long loc_id, String name, long lcpl_id, long gcpl_id, long gapl_id)
+ throws HDF5LibraryException, NullPointerException {
+ long id = _H5Gcreate2(loc_id, name, lcpl_id, gcpl_id, gapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Gcreate add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Gcreate2(long loc_id, String name, long lcpl_id, long gcpl_id,
+ long gapl_id) throws HDF5LibraryException, NullPointerException;
+
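+ // Illustrative usage sketch (comment only, not part of the wrapper API): creating a group
+ // together with any missing intermediate groups. fid and the path are hypothetical;
+ // H5Pcreate, H5Pset_create_intermediate_group and H5Pclose are assumed to be the wrappers
+ // declared in the H5P section of this class.
+ //
+ // long lcpl = H5.H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
+ // H5.H5Pset_create_intermediate_group(lcpl, true);
+ // long gid = H5.H5Gcreate(fid, "/a/b/c", lcpl, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ // H5.H5Pclose(lcpl);
+ // H5.H5Gclose(gid);
+ 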
+ /**
+ * H5Gcreate_anon creates a new empty group in the file specified by loc_id.
+ *
+ * @param loc_id
+ * IN: File or group identifier specifying the file in which the new group is to be created.
+ * @param gcpl_id
+ * IN: Identifier of group creation property list.
+ * @param gapl_id
+ * IN: Identifier of group access property list. (No group access properties have been implemented at
+ * this time; use H5P_DEFAULT.)
+ *
+ * @return a valid group identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Gcreate_anon(long loc_id, long gcpl_id, long gapl_id) throws HDF5LibraryException {
+ long id = _H5Gcreate_anon(loc_id, gcpl_id, gapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Gcreate_anon add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Gcreate_anon(long loc_id, long gcpl_id, long gapl_id)
+ throws HDF5LibraryException;
+
+ /**
+ * H5Gget_create_plist returns an identifier for the group creation property list associated with the group
+ * specified by group_id.
+ *
+ * @param group_id
+ * IN: Identifier of the group.
+ *
+ * @return an identifier for the group's creation property list
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Gget_create_plist(long group_id) throws HDF5LibraryException;
+
+ /**
+ * H5Gget_info retrieves information about the group specified by group_id. The information is returned in the
+ * group_info struct.
+ *
+ * @param group_id
+ * IN: Identifier of the group.
+ *
+ * @return a structure in which group information is returned
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native H5G_info_t H5Gget_info(long group_id) throws HDF5LibraryException;
+
+ /**
+ * H5Gget_info_by_idx retrieves information about a group, according to the group's position within an index.
+ *
+ * @param group_id
+ * IN: File or group identifier.
+ * @param group_name
+ * IN: Name of group for which information is to be retrieved.
+ * @param idx_type
+ * IN: Type of index by which objects are ordered
+ * @param order
+ * IN: Order of iteration within index
+ * @param n
+ * IN: Attribute's position in index
+ * @param lapl_id
+ * IN: Link access property list.
+ *
+ * @return a structure in which group information is returned
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native H5G_info_t H5Gget_info_by_idx(long group_id, String group_name, int idx_type,
+ int order, long n, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Gget_info_by_name retrieves information about the group group_name located in the file or group specified by
+ * loc_id.
+ *
+ * @param group_id
+ * IN: File or group identifier.
+ * @param name
+ * IN: Name of group for which information is to be retrieved.
+ * @param lapl_id
+ * IN: Link access property list.
+ *
+ * @return a structure in which group information is returned
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native H5G_info_t H5Gget_info_by_name(long group_id, String name, long lapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * retrieves information about all objects under the group (name) located in the file or group specified by loc_id.
+ *
+ * @param loc_id
+ * IN: File or group identifier
+ * @param name
+ * IN: Name of group for which information is to be retrieved
+ * @param objNames
+ * OUT: Names of all objects under the group, name.
+ * @param objTypes
+ * OUT: Types of all objects under the group, name.
+ * @param objRef
+ * OUT: Reference number of all objects under the group, name.
+ *
+ * @return the number of items found
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ */
+ public synchronized static int H5Gget_obj_info_all(long loc_id, String name, String[] objNames, int[] objTypes,
+ long[] objRef) throws HDF5LibraryException, NullPointerException {
+ if (objNames == null) {
+ throw new NullPointerException("H5Gget_obj_info_all(): name array is null");
+ }
+
+ return H5Gget_obj_info_all(loc_id, name, objNames, objTypes, null, null, objRef, HDF5Constants.H5_INDEX_NAME);
+ }
+
+ public synchronized static int H5Gget_obj_info_all(long loc_id, String name, String[] oname, int[] otype,
+ int[] ltype, long[] ref, int indx_type) throws HDF5LibraryException, NullPointerException {
+ return H5Gget_obj_info_full(loc_id, name, oname, otype, ltype, null, ref, indx_type, -1);
+ }
+
+ public synchronized static int H5Gget_obj_info_all(long loc_id, String name, String[] oname, int[] otype,
+ int[] ltype, long[] fno, long[] ref, int indx_type) throws HDF5LibraryException, NullPointerException {
+ return H5Gget_obj_info_full(loc_id, name, oname, otype, ltype, fno, ref, oname.length, indx_type, -1);
+ }
+
+ public synchronized static int H5Gget_obj_info_full(long loc_id, String name, String[] oname, int[] otype,
+ int[] ltype, long[] fno, long[] ref, int indx_type, int indx_order) throws HDF5LibraryException,
+ NullPointerException {
+ if (oname == null) {
+ throw new NullPointerException("H5Gget_obj_info_full(): name array is null");
+ }
+
+ if (otype == null) {
+ throw new NullPointerException("H5Gget_obj_info_full(): object type array is null");
+ }
+
+ if (oname.length == 0) {
+ throw new HDF5LibraryException("H5Gget_obj_info_full(): array size is zero");
+ }
+
+ if (oname.length != otype.length) {
+ throw new HDF5LibraryException("H5Gget_obj_info_full(): name and type array sizes are different");
+ }
+
+ if (ltype == null)
+ ltype = new int[otype.length];
+
+ if (fno == null)
+ fno = new long[ref.length];
+
+ if (indx_type < 0)
+ indx_type = HDF5Constants.H5_INDEX_NAME;
+
+ if (indx_order < 0)
+ indx_order = HDF5Constants.H5_ITER_INC;
+
+ log.trace("H5Gget_obj_info_full: oname_len={}", oname.length);
+ int status = H5Gget_obj_info_full(loc_id, name, oname, otype, ltype, fno, ref, oname.length, indx_type,
+ indx_order);
+ for (int indx = 0; indx < oname.length; indx++)
+ log.trace("H5Gget_obj_info_full: oname={}", oname[indx]);
+ return status;
+ }
+
+ private synchronized static native int H5Gget_obj_info_full(long loc_id, String name, String[] oname, int[] otype,
+ int[] ltype, long[] fno, long[] ref, int n, int indx_type, int indx_order) throws HDF5LibraryException,
+ NullPointerException;
+
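+ // Illustrative usage sketch (comment only, not part of the wrapper API): fetching the names,
+ // types and reference numbers of every member of a group in one call, sizing the arrays from
+ // the group's link count. fid and the group name are hypothetical.
+ //
+ // H5G_info_t ginfo = H5.H5Gget_info_by_name(fid, "/data", HDF5Constants.H5P_DEFAULT);
+ // int n = (int) ginfo.nlinks;
+ // String[] names = new String[n];
+ // int[] types = new int[n];
+ // long[] refs = new long[n];
+ // H5.H5Gget_obj_info_all(fid, "/data", names, types, refs);
+ // for (int i = 0; i < n; i++)
+ //     System.out.println(names[i] + " (type " + types[i] + ")");
+ 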
+ /**
+ * H5Gget_obj_info_idx reports the name and type of the object with index 'idx' in a group. The 'idx' corresponds to the
+ * index maintained by H5Giterate. Each link is returned, so objects with multiple links will be counted once for
+ * each link.
+ *
+ * @param loc_id
+ * IN: file or group ID.
+ * @param name
+ * IN: name of the group to iterate, relative to the loc_id
+ * @param idx
+ * IN: the index of the object to iterate.
+ * @param oname
+ * the name of the object [OUT]
+ * @param type
+ * the type of the object [OUT]
+ *
+ * @return non-negative if successful, -1 if not.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ */
+ public synchronized static int H5Gget_obj_info_idx(long loc_id, String name, int idx, String[] oname, int[] type)
+ throws HDF5LibraryException, NullPointerException {
+ oname[0] = H5Lget_name_by_idx(loc_id, name, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, idx,
+ HDF5Constants.H5P_DEFAULT);
+ H5L_info_t info = H5Lget_info_by_idx(loc_id, name, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, idx,
+ HDF5Constants.H5P_DEFAULT);
+ type[0] = info.type;
+ return 0;
+ }
+
+ /*
+ * These methods are provided so that callers do not need to query the library in a loop
+ * to get information for all the objects in a group, which can take a long time when a
+ * group contains more than 10,000 objects.
+ */
+ /**
+ * retrieves information about all objects (recursively) under the group (name) located in the file or group specified
+ * by loc_id, up to the maximum number of objects specified by objMax.
+ *
+ * @param loc_id
+ * IN: File or group identifier
+ * @param objNames
+ * OUT: Names of all objects under the group, name.
+ * @param objTypes
+ * OUT: Types of all objects under the group, name.
+ * @param lnkTypes
+ * OUT: Types of all links under the group, name.
+ * @param objRef
+ * OUT: Reference number of all objects under the group, name.
+ * @param objMax
+ * IN: Maximum number of all objects under the group, name.
+ *
+ * @return the number of items found
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ */
+ public synchronized static int H5Gget_obj_info_max(long loc_id, String[] objNames, int[] objTypes, int[] lnkTypes,
+ long[] objRef, long objMax) throws HDF5LibraryException, NullPointerException {
+ if (objNames == null) {
+ throw new NullPointerException("H5Gget_obj_info_max(): name array is null");
+ }
+
+ if (objTypes == null) {
+ throw new NullPointerException("H5Gget_obj_info_max(): object type array is null");
+ }
+
+ if (lnkTypes == null) {
+ throw new NullPointerException("H5Gget_obj_info_max(): link type array is null");
+ }
+
+ if (objNames.length <= 0) {
+ throw new HDF5LibraryException("H5Gget_obj_info_max(): array size is zero");
+ }
+
+ if (objMax <= 0) {
+ throw new HDF5LibraryException("H5Gget_obj_info_max(): maximum array size is zero");
+ }
+
+ if (objNames.length != objTypes.length) {
+ throw new HDF5LibraryException("H5Gget_obj_info_max(): name and type array sizes are different");
+ }
+
+ return H5Gget_obj_info_max(loc_id, objNames, objTypes, lnkTypes, objRef, objMax, objNames.length);
+ }
+
+ private synchronized static native int H5Gget_obj_info_max(long loc_id, String[] oname, int[] otype, int[] ltype,
+ long[] ref, long amax, int n) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Gn_members reports the number of objects in a group. The 'objects' include everything that will be visited by
+ * H5Giterate. Each link is returned, so objects with multiple links will be counted once for each link.
+ *
+ * @param loc_id
+ * file or group ID.
+ * @param name
+ * name of the group to iterate, relative to the loc_id
+ *
+ * @return the number of members in the group or -1 if error.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ */
+ public synchronized static long H5Gn_members(long loc_id, String name) throws HDF5LibraryException,
+ NullPointerException {
+ long grp_id = H5Gopen(loc_id, name, HDF5Constants.H5P_DEFAULT);
+ long n = -1;
+
+ try {
+ H5G_info_t info = H5.H5Gget_info(grp_id);
+ n = info.nlinks;
+ }
+ finally {
+ H5Gclose(grp_id);
+ }
+
+ return n;
+ }
+
+ /**
+ * H5Gopen opens an existing group, name, at the location specified by loc_id.
+ *
+ * @param loc_id
+ * IN: File or group identifier specifying the location of the group to be opened.
+ * @param name
+ * IN: Name of group to open.
+ * @param gapl_id
+ * IN: Identifier of group access property list.
+ *
+ * @return a valid group identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public static long H5Gopen(long loc_id, String name, long gapl_id) throws HDF5LibraryException,
+ NullPointerException {
+ long id = _H5Gopen2(loc_id, name, gapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Gopen add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Gopen2(long loc_id, String name, long gapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
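+ // Illustrative usage sketch (comment only, not part of the wrapper API): opening an existing
+ // group, reading its link count and closing it. fid and the group name are hypothetical.
+ //
+ // long gid = H5.H5Gopen(fid, "/data", HDF5Constants.H5P_DEFAULT);
+ // try {
+ //     H5G_info_t info = H5.H5Gget_info(gid);
+ //     System.out.println("/data contains " + info.nlinks + " links");
+ // }
+ // finally {
+ //     H5.H5Gclose(gid);
+ // }
+ 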
+ // ////////////////////////////////////////////////////////////
+ // //
+ // H5I: HDF5 1.8 Identifier Interface API Functions //
+ // //
+ // ////////////////////////////////////////////////////////////
+
+ public synchronized static native long H5Iget_file_id(long obj_id) throws HDF5LibraryException;
+
+ public synchronized static native long H5Iget_name(long obj_id, String[] name, long size)
+ throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Iget_ref(long obj_id) throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Idec_ref(long obj_id) throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Iinc_ref(long obj_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Iget_type retrieves the type of the object identified by obj_id.
+ *
+ * @param obj_id
+ * IN: Object identifier whose type is to be determined.
+ *
+ * @return the object type if successful; otherwise H5I_BADID.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Iget_type(long obj_id) throws HDF5LibraryException;
+
+ /**
+ * H5Iget_type_ref retrieves the reference count on an ID type. The reference count is used by the library to
+ * indicate when an ID type can be destroyed.
+ *
+ * @param type_id
+ * IN: The identifier of the type whose reference count is to be retrieved
+ *
+ * @return The current reference count on success, negative on failure.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Iget_type_ref(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Idec_type_ref decrements the reference count on an identifier type. The reference count is used by the
+ * library to indicate when an identifier type can be destroyed. If the reference count reaches zero,
+ * this function will destroy it.
+ *
+ * @param type_id
+ * IN: The identifier of the type whose reference count is to be decremented
+ *
+ * @return The current reference count on success, negative on failure.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Idec_type_ref(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Iinc_type_ref increments the reference count on an ID type. The reference count is used by the library
+ * to indicate when an ID type can be destroyed.
+ *
+ * @param type_id
+ * IN: The identifier of the type whose reference count is to be incremented
+ *
+ * @return The current reference count on success, negative on failure.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Iinc_type_ref(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Inmembers returns the number of identifiers of the identifier type specified in type_id.
+ *
+ * @param type_id
+ * IN: Identifier for the identifier type whose member count will be retrieved
+ *
+ * @return Number of identifiers of the specified identifier type
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Inmembers(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Iis_valid indicates whether the identifier specified in obj_id is valid.
+ *
+ * @param obj_id
+ * IN: Identifier to be checked
+ *
+ * @return a boolean, true if the specified identifier id is valid
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native boolean H5Iis_valid(long obj_id) throws HDF5LibraryException;
+
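+ // Illustrative usage sketch (comment only, not part of the wrapper API): defensive checks
+ // before using an identifier of unknown origin. some_id is a hypothetical identifier.
+ //
+ // if (H5.H5Iis_valid(some_id)) {
+ //     int t = H5.H5Iget_type(some_id);
+ //     if (t == HDF5Constants.H5I_DATASET)
+ //         System.out.println("dataset in file " + H5.H5Fget_name(some_id));
+ // }
+ 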
+ /**
+ * H5Itype_exists indicates whether the identifier type specified in type_id exists.
+ *
+ * @param type_id
+ * IN: the identifier type to be checked
+ *
+ * @return a boolean, true if the specified identifier type exists
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native boolean H5Itype_exists(int type_id) throws HDF5LibraryException;
+
+
+ /**
+ * H5Iclear_type deletes all identifiers of the type identified by the argument type.
+ *
+ * @param type_id
+ * IN: Identifier of identifier type which is to be cleared of identifiers
+ * @param force
+ * IN: Whether or not to force deletion of all identifiers
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Iclear_type(int type_id, boolean force) throws HDF5LibraryException;
+
+ /**
+ * H5Idestroy_type deletes an entire identifier type. All identifiers of this type are destroyed
+ * and no new identifiers of this type can be registered.
+ *
+ * @param type_id
+ * IN: Identifier of identifier type which is to be destroyed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Idestroy_type(int type_id) throws HDF5LibraryException;
+
+ // /////// unimplemented ////////
+
+ // void *H5Iobject_verify(hid_t id, H5I_type_t id_type);
+
+ // hid_t H5Iregister(H5I_type_t type, const void *object);
+
+ // H5I_type_t H5Iregister_type(size_t hash_size, unsigned reserved, H5I_free_t free_func);
+
+ // void *H5Iremove_verify(hid_t id, H5I_type_t id_type);
+
+ // void *H5Isearch(H5I_type_t type, H5I_search_func_t func, void *key);
+
+ // //////////////////////////////////////////////////////////////////
+ // H5L: Link Interface Functions //
+ // //////////////////////////////////////////////////////////////////
+
+ /**
+ * H5Lcopy copies a link from one location to another.
+ *
+ * @param src_loc
+ * IN: Location identifier of the source link
+ * @param src_name
+ * IN: Name of the link to be copied
+ * @param dst_loc
+ * IN: Location identifier specifying the destination of the copy
+ * @param dst_name
+ * IN: Name to be assigned to the new copy
+ * @param lcpl_id
+ * IN: Link creation property list identifier
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native void H5Lcopy(long src_loc, String src_name, long dst_loc, String dst_name,
+ long lcpl_id, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Lcreate_external creates a new external link, which is a soft link to an object in a different HDF5 file
+ * from the location of the link.
+ *
+ * @param file_name
+ * IN: Name of the target file containing the target object.
+ * @param obj_name
+ * IN: Path within the target file to the target object.
+ * @param link_loc_id
+ * IN: The file or group identifier for the new link.
+ * @param link_name
+ * IN: The name of the new link.
+ * @param lcpl_id
+ * IN: Link creation property list identifier
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native void H5Lcreate_external(String file_name, String obj_name, long link_loc_id,
+ String link_name, long lcpl_id, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Lcreate_hard creates a new hard link to a pre-existing object in an HDF5 file.
+ *
+ * @param cur_loc
+ * IN: The file or group identifier for the target object.
+ * @param cur_name
+ * IN: Name of the target object, which must already exist.
+ * @param dst_loc
+ * IN: The file or group identifier for the new link.
+ * @param dst_name
+ * IN: The name of the new link.
+ * @param lcpl_id
+ * IN: Link creation property list identifier
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - cur_name or dst_name is null.
+ **/
+ public synchronized static native void H5Lcreate_hard(long cur_loc, String cur_name, long dst_loc, String dst_name,
+ long lcpl_id, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Lcreate_soft creates a new soft link to an object in an HDF5 file.
+ *
+ * @param link_target
+ * IN: Path to the target object, which is not required to exist.
+ * @param link_loc_id
+ * IN: The file or group identifier for the new link.
+ * @param link_name
+ * IN: The name of the new link.
+ * @param lcpl_id
+ * IN: Link creation property list identifier
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - link_name is null.
+ **/
+ public synchronized static native void H5Lcreate_soft(String link_target, long link_loc_id, String link_name,
+ long lcpl_id, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
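+ // Illustrative usage sketch (comment only, not part of the wrapper API): creating a hard,
+ // a soft and an external link to a dataset. fid, the paths and the external file name are
+ // hypothetical.
+ //
+ // H5.H5Lcreate_hard(fid, "/data/temperature", fid, "/links/hard",
+ //         HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ // H5.H5Lcreate_soft("/data/temperature", fid, "/links/soft",
+ //         HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ // H5.H5Lcreate_external("other.h5", "/data/pressure", fid, "/links/external",
+ //         HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ 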
+ /**
+ * H5Ldelete removes the link specified from a group.
+ *
+ * @param loc_id
+ * IN: Identifier of the file or group containing the object.
+ * @param name
+ * IN: Name of the link to delete.
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native void H5Ldelete(long loc_id, String name, long lapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Ldelete_by_idx removes the nth link in a group, according to the specified order within the specified index.
+ *
+ * @param loc_id
+ * IN: File or group identifier specifying location of subject group
+ * @param group_name
+ * IN: Name of subject group
+ * @param idx_type
+ * IN: Index or field which determines the order
+ * @param order
+ * IN: Order within field or index
+ * @param n
+ * IN: Link for which to retrieve information
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - group_name is null.
+ **/
+ public synchronized static native void H5Ldelete_by_idx(long loc_id, String group_name, int idx_type, int order,
+ long n, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Lexists checks if a link with a particular name exists in a group.
+ *
+ * @param loc_id
+ * IN: Identifier of the file or group to query.
+ * @param name
+ * IN: The name of the link to check.
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @return a boolean, true if the name exists, otherwise false.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native boolean H5Lexists(long loc_id, String name, long lapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
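+ // Illustrative usage sketch (comment only, not part of the wrapper API): checking that a
+ // link exists before asking for its metadata, which avoids an HDF5LibraryException for
+ // missing names. fid and the link name are hypothetical.
+ //
+ // if (H5.H5Lexists(fid, "/links/soft", HDF5Constants.H5P_DEFAULT)) {
+ //     H5L_info_t li = H5.H5Lget_info(fid, "/links/soft", HDF5Constants.H5P_DEFAULT);
+ //     if (li.type == HDF5Constants.H5L_TYPE_SOFT)
+ //         System.out.println("soft link");
+ // }
+ 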
+ /**
+ * H5Lget_info returns information about the specified link.
+ *
+ * @param loc_id
+ * IN: Identifier of the file or group.
+ * @param name
+ * IN: Name of the link for which information is being sought.
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @return a buffer(H5L_info_t) for the link information.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native H5L_info_t H5Lget_info(long loc_id, String name, long lapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Lget_info_by_idx retrieves the metadata for the nth link in a group, according to the specified order within
+ * the specified index.
+ *
+ * @param loc_id
+ * IN: File or group identifier specifying location of subject group
+ * @param group_name
+ * IN: Name of subject group
+ * @param idx_type
+ * IN: Type of index
+ * @param order
+ * IN: Order within field or index
+ * @param n
+ * IN: Link for which to retrieve information
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @return a buffer(H5L_info_t) for the link information.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - group_name is null.
+ **/
+ public synchronized static native H5L_info_t H5Lget_info_by_idx(long loc_id, String group_name, int idx_type,
+ int order, long n, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Lget_name_by_idx retrieves name of the nth link in a group, according to the order within a specified field or
+ * index.
+ *
+ * @param loc_id
+ * IN: File or group identifier specifying location of subject group
+ * @param group_name
+ * IN: Name of subject group
+ * @param idx_type
+ * IN: Type of index
+ * @param order
+ * IN: Order within field or index
+ * @param n
+ * IN: Link for which to retrieve information
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @return a String for the link name.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - group_name is null.
+ **/
+ public synchronized static native String H5Lget_name_by_idx(long loc_id, String group_name, int idx_type,
+ int order, long n, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Lget_value returns the link value of a symbolic link. Note that this function is a combination
+ * of H5Lget_info(), H5Lget_val() and, for external links, H5Lunpack_elink_val().
+ *
+ * @param loc_id
+ * IN: Identifier of the file or group containing the object.
+ * @param name
+ * IN: Name of the symbolic link.
+ * @param link_value
+ * OUT: Path of the symbolic link, or the file_name and path of an external file.
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @return the link type
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native int H5Lget_value(long loc_id, String name, String[] link_value, long lapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Lget_value_by_idx retrieves the value of the nth link in a group, according to the order within an index.
+ * Note that this function is a combination of H5Lget_info(), H5Lget_val() and, for external links,
+ * H5Lunpack_elink_val().
+ *
+ * @param loc_id
+ * IN: File or group identifier specifying location of subject group
+ * @param group_name
+ * IN: Name of subject group
+ * @param idx_type
+ * IN: Type of index
+ * @param order
+ * IN: Order within field or index
+ * @param n
+ * IN: Link for which to retrieve information
+ * @param link_value
+ * OUT: Path of the symbolic link, or the file_name and path of an external file.
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @return the link type
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - group_name is null.
+ **/
+ public synchronized static native int H5Lget_value_by_idx(long loc_id, String group_name, int idx_type, int order,
+ long n, String[] link_value, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Literate iterates through links in a group.
+ *
+ * @param grp_id
+ * IN: Identifier specifying subject group
+ * @param idx_type
+ * IN: Type of index
+ * @param order
+ * IN: Order of iteration within index
+ * @param idx
+ * IN: Iteration position at which to start
+ * @param op
+ * IN: Callback function passing data regarding the link to the calling application
+ * @param op_data
+ * IN: User-defined pointer to data required by the application for its processing of the link
+ *
+ * @return returns the return value of the first operator that returns a positive value, or zero if all members were
+ * processed with no operator returning non-zero.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Literate(long grp_id, int idx_type, int order, long idx, H5L_iterate_cb op,
+ H5L_iterate_t op_data) throws HDF5LibraryException;
+
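+ // Illustrative usage sketch (comment only, not part of the wrapper API): iterating over the
+ // links of a group with a callback. The callback signature is assumed to be the one declared
+ // by H5L_iterate_cb in hdf.hdf5lib.callbacks; gid is a hypothetical open group identifier.
+ //
+ // H5L_iterate_cb printName = new H5L_iterate_cb() {
+ //     public int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+ //         System.out.println(name);
+ //         return 0; // 0 keeps the iteration going
+ //     }
+ // };
+ // H5.H5Literate(gid, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0L,
+ //         printName, new H5L_iterate_t() {});
+ 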
+ /**
+ * H5Literate_by_name iterates through links in a group.
+ *
+ * @param grp_id
+ * IN: Identifier specifying subject group
+ * @param group_name
+ * IN: Name of subject group
+ * @param idx_type
+ * IN: Type of index
+ * @param order
+ * IN: Order of iteration within index
+ * @param idx
+ * IN: Iteration position at which to start
+ * @param op
+ * IN: Callback function passing data regarding the link to the calling application
+ * @param op_data
+ * IN: User-defined pointer to data required by the application for its processing of the link
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @return returns the return value of the first operator that returns a positive value, or zero if all members were
+ * processed with no operator returning non-zero.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - group_name is null.
+ **/
+ public synchronized static native int H5Literate_by_name(long grp_id, String group_name, int idx_type, int order,
+ long idx, H5L_iterate_cb op, H5L_iterate_t op_data, long lapl_id) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+ * H5Lmove renames a link within an HDF5 file.
+ *
+ * @param src_loc
+ * IN: Original file or group identifier.
+ * @param src_name
+ * IN: Original link name.
+ * @param dst_loc
+ * IN: Destination file or group identifier.
+ * @param dst_name
+ * IN: New link name.
+ * @param lcpl_id
+ * IN: Link creation property list identifier to be associated with the new link.
+ * @param lapl_id
+ * IN: Link access property list identifier to be associated with the new link.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native void H5Lmove(long src_loc, String src_name, long dst_loc, String dst_name,
+ long lcpl_id, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Lvisit recursively visits all links starting from a specified group.
+ *
+ * @param grp_id
+ * IN: Identifier specifying subject group
+ * @param idx_type
+ * IN: Type of index
+ * @param order
+ * IN: Order of iteration within index
+ * @param op
+ * IN: Callback function passing data regarding the link to the calling application
+ * @param op_data
+ * IN: User-defined pointer to data required by the application for its processing of the link
+ *
+ * @return returns the return value of the first operator that returns a positive value, or zero if all members were
+ * processed with no operator returning non-zero.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Lvisit(long grp_id, int idx_type, int order, H5L_iterate_cb op,
+ H5L_iterate_t op_data) throws HDF5LibraryException;
+
+ /**
+ * H5Lvisit_by_name recursively visits all links starting from a specified group.
+ *
+ * @param loc_id
+ * IN: Identifier specifying subject group
+ * @param group_name
+ * IN: Name of subject group
+ * @param idx_type
+ * IN: Type of index
+ * @param order
+ * IN: Order of iteration within index
+ * @param op
+ * IN: Callback function passing data regarding the link to the calling application
+ * @param op_data
+ * IN: User-defined pointer to data required by the application for its processing of the link
+ * @param lapl_id
+ * IN: link access property
+ *
+ * @return returns the return value of the first operator that returns a positive value, or zero if all members were
+ * processed with no operator returning non-zero.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - group_name is null.
+ **/
+ public synchronized static native int H5Lvisit_by_name(long loc_id, String group_name, int idx_type, int order,
+ H5L_iterate_cb op, H5L_iterate_t op_data, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+
+ /**
+ * H5Lis_registered tests whether a user-defined link class is currently registered,
+ * either by the HDF5 Library or by the user through the use of H5Lregister.
+ *
+ * @param link_cls_id
+ * IN: User-defined link class identifier
+ *
+ * @return Returns a positive value if the link class has been registered and zero if it is unregistered.
+ * Otherwise returns a negative value; this may mean that the identifier is not a valid user-defined class identifier.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Lis_registered(int link_cls_id) throws HDF5LibraryException;
+
+ /**
+ * H5Lunregister unregisters a class of user-defined links, preventing them from being traversed, queried, moved, etc.
+ *
+ * @param link_cls_id
+ * IN: User-defined link class identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Lunregister(int link_cls_id) throws HDF5LibraryException;
+
+ // /////// unimplemented ////////
+ // herr_t H5Lcreate_ud(hid_t link_loc_id, const char *link_name,
+ // H5L_type_t link_type, const void *udata, size_t udata_size, hid_t lcpl_id,
+ // hid_t lapl_id);
+
+ // herr_t H5Lregister(const H5L_class_t *cls);
+
+ // herr_t H5Lunpack_elink_val(const void *ext_linkval/*in*/, size_t link_size,
+ // unsigned *flags, const char **filename/*out*/, const char **obj_path /*out*/);
+ // herr_t H5Lget_val(hid_t loc_id, const char *name, void *buf/*out*/,
+ // size_t size, hid_t lapl_id);
+ // herr_t H5Lget_val_by_idx(hid_t loc_id, const char *group_name,
+ // H5_index_t idx_type, H5_iter_order_t order, hsize_t n,
+ // void *buf/*out*/, size_t size, hid_t lapl_id);
+
+
+ // ////////////////////////////////////////////////////////////
+ // //
+ // H5O: HDF5 1.8 Object Interface API Functions //
+ // //
+ // ////////////////////////////////////////////////////////////
+
+ /**
+ * H5Oclose closes the group, dataset, or named datatype specified.
+ *
+ * @param object_id
+ * IN: Object identifier
+ *
+ * @return non-negative on success
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static int H5Oclose(long object_id) throws HDF5LibraryException {
+ if (object_id < 0)
+ return 0; // throw new HDF5LibraryException("Negative ID");;
+
+ log.trace("OPEN_IDS: H5Oclose remove {}", object_id);
+ OPEN_IDS.remove(object_id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ return _H5Oclose(object_id);
+ }
+
+ private synchronized static native int _H5Oclose(long object_id) throws HDF5LibraryException;
+
+ /**
+ * H5Ocopy copies the group, dataset or named datatype specified from the file or group specified by source location
+ * to the destination location.
+ *
+ * @param src_loc_id
+ * IN: Object identifier indicating the location of the source object to be copied
+ * @param src_name
+ * IN: Name of the source object to be copied
+ * @param dst_loc_id
+ * IN: Location identifier specifying the destination
+ * @param dst_name
+ * IN: Name to be assigned to the new copy
+ * @param ocpypl_id
+ * IN: Object copy property list
+ * @param lcpl_id
+ * IN: Link creation property list for the new hard link
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native void H5Ocopy(long src_loc_id, String src_name, long dst_loc_id, String dst_name,
+ long ocpypl_id, long lcpl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Oget_comment retrieves the comment for the specified object.
+ *
+ * @param obj_id
+ * IN: Identifier of the target object
+ *
+ * @return the comment
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native String H5Oget_comment(long obj_id) throws HDF5LibraryException, IllegalArgumentException;
+
+ /**
+ * H5Oset_comment sets the comment for the specified object.
+ *
+ * @param obj_id
+ * IN: Identifier of the target object
+ * @param comment
+ * IN: The new comment.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ * @deprecated As of HDF5 1.8 in favor of object attributes.
+ **/
+ @Deprecated
+ public synchronized static native void H5Oset_comment(long obj_id, String comment) throws HDF5LibraryException;
+
+ /**
+ * H5Oget_comment_by_name retrieves the comment for an object.
+ *
+ * @param loc_id
+ * IN: Identifier of a file, group, dataset, or named datatype.
+ * @param name
+ * IN: Relative name of the object whose comment is to be retrieved.
+ * @param lapl_id
+ * IN: Link access property list identifier.
+ *
+ * @return the comment
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native String H5Oget_comment_by_name(long loc_id, String name, long lapl_id)
+ throws HDF5LibraryException, IllegalArgumentException, NullPointerException;
+
+ // long H5Oget_comment_by_name(int loc_id, String name, String comment, long bufsize, int lapl_id);
+
+ /**
+ * H5Oset_comment_by_name sets the comment for the specified object.
+ *
+ * @param loc_id
+ * IN: Identifier of a file, group, dataset, or named datatype.
+ * @param name
+ * IN: Relative name of the object whose comment is to be set or reset.
+ * @param comment
+ * IN: The new comment.
+ * @param lapl_id
+ * IN: Link access property list identifier.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ *
+ * @deprecated As of HDF5 1.8 in favor of object attributes.
+ **/
+ @Deprecated
+ public synchronized static native void H5Oset_comment_by_name(long loc_id, String name, String comment, long lapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Oget_info retrieves the metadata for an object specified by an identifier.
+ *
+ * @param loc_id
+ * IN: Identifier for target object
+ *
+ * @return object information
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native H5O_info_t H5Oget_info(long loc_id) throws HDF5LibraryException,
+ NullPointerException;
+
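+ // Illustrative usage sketch (comment only, not part of the wrapper API): inspecting an
+ // object's metadata. oid is a hypothetical open object identifier; the type and num_attrs
+ // fields are assumed to be those of H5O_info_t in this package.
+ //
+ // H5O_info_t oinfo = H5.H5Oget_info(oid);
+ // if (oinfo.type == HDF5Constants.H5O_TYPE_DATASET)
+ //     System.out.println("dataset with " + oinfo.num_attrs + " attributes");
+ 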
+ /**
+ * H5Oget_info_by_idx retrieves the metadata for an object, identifying the object by an index position.
+ *
+ * @param loc_id
+ * IN: File or group identifier
+ * @param group_name
+ * IN: Name of group, relative to loc_id, in which object is located
+ * @param idx_type
+ * IN: Type of index by which objects are ordered
+ * @param order
+ * IN: Order of iteration within index
+ * @param n
+ * IN: Object to open
+ * @param lapl_id
+ * IN: Access property list identifier for the link pointing to the object (Not currently used; pass as
+ * H5P_DEFAULT.)
+ *
+ * @return object information
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native H5O_info_t H5Oget_info_by_idx(long loc_id, String group_name, int idx_type,
+ int order, long n, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Oget_info_by_name retrieves the metadata for an object, identifying the object by location and relative name.
+ *
+ * @param loc_id
+ * IN: File or group identifier specifying location of group in which object is located
+ * @param name
+ * IN: Relative name of group
+ * @param lapl_id
+ * IN: Access property list identifier for the link pointing to the object (Not currently used; pass as
+ * H5P_DEFAULT.)
+ *
+ * @return object information
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native H5O_info_t H5Oget_info_by_name(long loc_id, String name, long lapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Olink creates a new hard link to an object in an HDF5 file.
+ *
+ * @param obj_id
+ * IN: Object to be linked.
+ * @param new_loc_id
+ * IN: File or group identifier specifying location at which object is to be linked.
+ * @param new_name
+ * IN: Relative name of link to be created.
+ * @param lcpl_id
+ * IN: Link creation property list identifier.
+ * @param lapl_id
+ * IN: Access property list identifier.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native void H5Olink(long obj_id, long new_loc_id, String new_name, long lcpl_id,
+ long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Oopen opens a group, dataset, or named datatype specified by a location and a path name.
+ *
+ * @param loc_id
+ * IN: File or group identifier
+ * @param name
+ * IN: Relative path to the object
+ * @param lapl_id
+ * IN: Access property list identifier for the link pointing to the object
+ *
+ * @return an object identifier for the opened object
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public static long H5Oopen(long loc_id, String name, long lapl_id) throws HDF5LibraryException, NullPointerException {
+ long id = _H5Oopen(loc_id, name, lapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Oopen add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Oopen(long loc_id, String name, long lapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Ovisit recursively visits all objects accessible from a specified object.
+ *
+ * @param obj_id
+ * IN: Identifier of the object at which the recursive iteration begins.
+ * @param idx_type
+ * IN: Type of index
+ * @param order
+ * IN: Order of iteration within index
+ * @param op
+ * IN: Callback function passing data regarding the object to the calling application
+ * @param op_data
+ * IN: User-defined pointer to data required by the application for its processing of the object
+ *
+ * @return returns the return value of the first operator that returns a positive value, or zero if all members were
+ * processed with no operator returning non-zero.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native int H5Ovisit(long obj_id, int idx_type, int order, H5O_iterate_cb op,
+ H5O_iterate_t op_data) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Ovisit_by_name recursively visits all objects starting from a specified object.
+ *
+ * @param loc_id
+ * IN: File or group identifier
+ * @param obj_name
+ * IN: Relative path to the object
+ * @param idx_type
+ * IN: Type of index
+ * @param order
+ * IN: Order of iteration within index
+ * @param op
+ * IN: Callback function passing data regarding the object to the calling application
+ * @param op_data
+ * IN: User-defined pointer to data required by the application for its processing of the object
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @return returns the return value of the first operator that returns a positive value, or zero if all members were
+ * processed with no operator returning non-zero.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native int H5Ovisit_by_name(long loc_id, String obj_name, int idx_type, int order,
+ H5O_iterate_cb op, H5O_iterate_t op_data, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+
+ /**
+ * H5Oexists_by_name is used by an application to check that an existing link resolves to an object.
+ * Primarily, it is designed to check for dangling soft, external, or user-defined links.
+ *
+ * @param loc_id
+ * IN: File or group identifier
+ * @param obj_name
+ * IN: Relative path to the object
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @return true if the link resolves to an object, false otherwise
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native boolean H5Oexists_by_name(long loc_id, String obj_name, long lapl_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Odecr_refcount decrements the hard link reference count for an object.
+ *
+ * @param object_id IN: Object identifier
+ *
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Odecr_refcount(long object_id) throws HDF5LibraryException;
+
+ /**
+ * H5Oincr_refcount increments the hard link reference count for an object.
+ *
+ * @param object_id IN: Object identifier
+ *
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Oincr_refcount(long object_id) throws HDF5LibraryException;
+
+ /**
+ * H5Oopen_by_addr opens a group, dataset, or named datatype using its address within an HDF5 file.
+ *
+ * @param loc_id IN: File or group identifier
+ * @param addr IN: Object's address in the file
+ *
+ * @return an object identifier for the opened object
+ *
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ **/
+ public static long H5Oopen_by_addr(long loc_id, long addr) throws HDF5LibraryException {
+ long id = _H5Oopen_by_addr(loc_id, addr);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Oopen_by_addr add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Oopen_by_addr(long loc_id, long addr)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Oopen_by_idx opens the nth object in the group specified.
+ *
+ * @param loc_id IN: File or group identifier
+ * @param group_name IN: Name of group, relative to loc_id, in which object is located
+ * @param idx_type IN: Type of index by which objects are ordered
+ * @param order IN: Order of iteration within index
+ * @param n IN: Object to open
+ * @param lapl_id IN: Access property list identifier for the link pointing to the object
+ *
+ * @return an object identifier for the opened object
+ *
+ * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ * @exception NullPointerException - group_name is null.
+ **/
+ public static long H5Oopen_by_idx(long loc_id, String group_name,
+ int idx_type, int order, long n, long lapl_id) throws HDF5LibraryException, NullPointerException {
+ long id = _H5Oopen_by_idx(loc_id, group_name, idx_type, order, n, lapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Oopen_by_idx add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+    private synchronized static native long _H5Oopen_by_idx(long loc_id, String group_name,
+ int idx_type, int order, long n, long lapl_id) throws HDF5LibraryException, NullPointerException;
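+
+    // Illustrative sketch: opening the first object in group "/G1" when iterating
+    // by name in increasing order, then closing it. "file_id" is a hypothetical
+    // identifier from H5Fopen; error handling is omitted.
+    //
+    //     long obj_id = H5.H5Oopen_by_idx(file_id, "/G1", HDF5Constants.H5_INDEX_NAME,
+    //             HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+    //     H5.H5Oclose(obj_id);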
+
+ // /////// unimplemented ////////
+
+ // ////////////////////////////////////////////////////////////
+ // //
+ // H5P: Property List Interface Functions //
+ // //
+ // ////////////////////////////////////////////////////////////
+
+ // Generic property list routines
+
+ /**
+ * H5Pget_class_name retrieves the name of a generic property list class
+ *
+ * @param plid
+ * IN: Identifier of property object to query
+ * @return name of a property list if successful; null if failed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native String H5Pget_class_name(long plid) throws HDF5LibraryException;
+
+ /**
+ * H5Pcreate creates a new property as an instance of some property list class.
+ *
+ * @param type
+ * IN: The type of property list to create.
+ *
+ * @return a property list identifier (plist) if successful; otherwise Fail (-1).
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Pcreate(long type) throws HDF5LibraryException {
+ long id = _H5Pcreate(type);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Pcreate add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Pcreate(long type) throws HDF5LibraryException;
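+
+    // Illustrative sketch: the usual create/use/close life cycle of a property list,
+    // here a dataset creation property list. Error handling is omitted.
+    //
+    //     long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+    //     // ... set creation properties on dcpl_id ...
+    //     H5.H5Pclose(dcpl_id);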
+
+ /**
+     * Sets a property list value (supports integer values only).
+ *
+ * @param plid
+ * IN: Property list identifier to modify
+ * @param name
+ * IN: Name of property to modify
+ * @param value
+ * IN: value to set the property to
+ * @return a non-negative value if successful; a negative value if failed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native int H5Pset(long plid, String name, int value) throws HDF5LibraryException;
+
+ /**
+ * H5Pexist determines whether a property exists within a property list or class
+ *
+ * @param plid
+ * IN: Identifier for the property to query
+ * @param name
+ * IN: Name of property to check for
+ * @return a true value if the property exists in the property object; false if the property does not exist;
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native boolean H5Pexist(long plid, String name) throws HDF5LibraryException;
+
+ /**
+ * H5Pget_size retrieves the size of a property's value in bytes
+ *
+ * @param plid
+ * IN: Identifier of property object to query
+ * @param name
+ * IN: Name of property to query
+ * @return size of a property's value if successful; a negative value if failed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native long H5Pget_size(long plid, String name) throws HDF5LibraryException;
+
+ /**
+ * H5Pget_nprops retrieves the number of properties in a property list or class
+ *
+ * @param plid
+ * IN: Identifier of property object to query
+ * @return number of properties if successful; a negative value if failed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native long H5Pget_nprops(long plid) throws HDF5LibraryException;
+
+ /**
+ * H5Pget_class returns the property list class for the property list identified by the plist parameter.
+ *
+ * @param plist
+ * IN: Identifier of property list to query.
+ * @return a property list class if successful. Otherwise returns H5P_ROOT (-1).
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Pget_class(long plist) throws HDF5LibraryException;
+
+ /**
+ * H5Pget_class_parent retrieves an identifier for the parent class of a property class
+ *
+ * @param plid
+ * IN: Identifier of the property class to query
+ * @return a valid parent class object identifier if successful; a negative value if failed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native long H5Pget_class_parent(long plid) throws HDF5LibraryException;
+
+ /**
+     * H5Pget retrieves a copy of the value for a property in a property list (supports integer values only)
+ *
+ * @param plid
+ * IN: Identifier of property object to query
+ * @param name
+ * IN: Name of property to query
+ * @return value for a property if successful; a negative value if failed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native int H5Pget(long plid, String name) throws HDF5LibraryException;
+
+ /**
+ * H5Pequal determines if two property lists or classes are equal
+ *
+ * @param plid1
+ * IN: First property object to be compared
+ * @param plid2
+ * IN: Second property object to be compared
+ * @return positive value if equal; zero if unequal, a negative value if failed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native int H5Pequal(long plid1, long plid2) throws HDF5LibraryException;
+
+    public static boolean H5P_equal(long plid1, long plid2) throws HDF5LibraryException {
+        return (H5Pequal(plid1, plid2) == 1);
+    }
+
+ /**
+ * H5Pisa_class checks to determine whether a property list is a member of the specified class
+ *
+ * @param plist
+ * IN: Identifier of the property list
+ * @param pclass
+ * IN: Identifier of the property class
+ * @return a positive value if equal; zero if unequal; a negative value if failed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native int H5Pisa_class(long plist, long pclass) throws HDF5LibraryException;
+
+ /**
+ * H5Pcopy_prop copies a property from one property list or class to another
+ *
+ * @param dst_id
+ * IN: Identifier of the destination property list or class
+ * @param src_id
+ * IN: Identifier of the source property list or class
+ * @param name
+ * IN: Name of the property to copy
+ * @return a non-negative value if successful; a negative value if failed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native int H5Pcopy_prop(long dst_id, long src_id, String name)
+ throws HDF5LibraryException;
+
+ /**
+ * H5Premove removes a property from a property list
+ *
+ * @param plid
+ * IN: Identifier of the property list to modify
+ * @param name
+ * IN: Name of property to remove
+ * @return a non-negative value if successful; a negative value if failed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native int H5Premove(long plid, String name) throws HDF5LibraryException;
+
+ /**
+ * H5Punregister removes a property from a property list class
+ *
+ * @param plid
+ * IN: Property list class from which to remove permanent property
+ * @param name
+ * IN: Name of property to remove
+ * @return a non-negative value if successful; a negative value if failed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native int H5Punregister(long plid, String name) throws HDF5LibraryException;
+
+ /**
+ * Closes an existing property list class
+ *
+ * @param plid
+ * IN: Property list class to close
+ * @return a non-negative value if successful; a negative value if failed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public static int H5Pclose_class(long plid) throws HDF5LibraryException {
+ if (plid < 0)
+ return 0; // throw new HDF5LibraryException("Negative ID");;
+
+ log.trace("OPEN_IDS: H5Pclose_class remove {}", plid);
+ OPEN_IDS.remove(plid);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ return _H5Pclose_class(plid);
+ }
+
+    private synchronized static native int _H5Pclose_class(long plid) throws HDF5LibraryException;
+
+ /**
+ * H5Pclose terminates access to a property list.
+ *
+ * @param plist
+ * IN: Identifier of the property list to terminate access to.
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static int H5Pclose(long plist) throws HDF5LibraryException {
+ if (plist < 0)
+ return 0; // throw new HDF5LibraryException("Negative ID");;
+
+ log.trace("OPEN_IDS: H5Pclose remove {}", plist);
+ OPEN_IDS.remove(plist);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ return _H5Pclose(plist);
+ }
+
+ private synchronized static native int _H5Pclose(long plist) throws HDF5LibraryException;
+
+ /**
+ * H5Pcopy copies an existing property list to create a new property list.
+ *
+ * @param plist
+ * IN: Identifier of property list to duplicate.
+ *
+ * @return a property list identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Pcopy(long plist) throws HDF5LibraryException {
+ long id = _H5Pcopy(plist);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Pcopy add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Pcopy(long plist) throws HDF5LibraryException;
+
+ public static long H5Pcreate_class_nocb(long parent_class, String name) throws HDF5LibraryException {
+ long id = _H5Pcreate_class_nocb(parent_class, name);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Pcreate_class_nocb add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Pcreate_class_nocb(long parent_class, String name) throws HDF5LibraryException;
+
+// public static long H5Pcreate_class(long parent_class, String name, H5P_cls_create_func_cb create_op, H5P_cls_create_func_t create_data,
+// H5P_cls_copy_func_cb copy_op, H5P_cls_copy_func_t copy_data, H5P_cls_close_func_cb close_op, H5P_cls_close_func_t close_data) throws HDF5LibraryException {
+// long id = _H5Pcreate_class(parent_class, name, create_op, create_data, copy_op, copy_data, close_op, close_data);
+// if (id > 0) {
+// log.trace("OPEN_IDS: H5Pcreate_class add {}", id);
+// OPEN_IDS.add(id);
+// log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+// }
+// return id;
+// }
+//
+// private synchronized static native long _H5Pcreate_class(long parent_class, String name, H5P_cls_create_func_cb create_op, H5P_cls_create_func_t create_data,
+// H5P_cls_copy_func_cb copy_op, H5P_cls_copy_func_t copy_data, H5P_cls_close_func_cb close_op, H5P_cls_close_func_t close_data) throws HDF5LibraryException;
+
+ public synchronized static native void H5Pregister2_nocb(long plist_class, String name, long size, byte[] def_value) throws HDF5LibraryException;
+
+// public synchronized static native void H5Pregister2(long plist_class, String name, long size, byte[] def_value, H5P_prp_create_func_cb prp_create, H5P_prp_set_func_cb prp_set,
+// H5P_prp_get_func_cb prp_get, H5P_prp_delete_func_cb prp_delete, H5P_prp_copy_func_cb prp_copy, H5P_prp_compare_func_cb prp_cmp, H5P_prp_close_func_cb prp_close) throws HDF5LibraryException;
+
+ public synchronized static native void H5Pinsert2_nocb(long plist, String name, long size, byte[] value) throws HDF5LibraryException;
+
+
+ // public synchronized static native void H5Pinsert2(long plist, String name, long size, byte[] value, H5P_prp_set_func_cb prp_set, H5P_prp_get_func_cb prp_get,
+ // H5P_prp_delete_func_cb prp_delete, H5P_prp_copy_func_cb prp_copy, H5P_prp_compare_func_cb prp_cmp, H5P_prp_close_func_cb prp_close) throws HDF5LibraryException;
+
+ public synchronized static native int H5Piterate(long plist, int[] idx, H5P_iterate_cb op, H5P_iterate_t op_data) throws HDF5LibraryException;
+
+ // Object creation property list (OCPL) routines
+
+ /**
+ * H5Pget_attr_phase_change retrieves attribute storage phase change thresholds.
+ *
+ * @param ocpl_id
+     *            IN: Object (dataset or group) creation property list identifier
+     * @param attributes
+     *            The maximum and minimum number of attributes to be stored.
+ *
+ * <pre>
+ * attributes[0] = The maximum number of attributes to be stored in compact storage
+ * attributes[1] = The minimum number of attributes to be stored in dense storage
+ * </pre>
+ *
+ * @return Returns a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+     *                - attributes is null.
+ *
+ **/
+ public synchronized static native int H5Pget_attr_phase_change(long ocpl_id, int[] attributes)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Pset_attr_phase_change sets threshold values for attribute storage on an object. These
+ * thresholds determine the point at which attribute storage changes
+ * from compact storage (i.e., storage in the object header)
+ * to dense storage (i.e., storage in a heap and indexed with a B-tree).
+ *
+ * @param ocpl_id
+     *            IN: Object (dataset or group) creation property list identifier
+ * @param max_compact
+ * IN: Maximum number of attributes to be stored in compact storage (Default: 8)
+ * @param min_dense
+ * IN: Minimum number of attributes to be stored in dense storage (Default: 6)
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native void H5Pset_attr_phase_change(long ocpl_id, int max_compact, int min_dense)
+ throws HDF5LibraryException;
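+
+    // Illustrative sketch: keeping up to 20 attributes in compact storage, reverting
+    // to compact storage only below 15 attributes, then reading the thresholds back.
+    // "ocpl_id" is a hypothetical object creation property list identifier.
+    //
+    //     H5.H5Pset_attr_phase_change(ocpl_id, 20, 15);
+    //     int[] attributes = new int[2];
+    //     H5.H5Pget_attr_phase_change(ocpl_id, attributes); // attributes[0]=20, attributes[1]=15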
+
+ /**
+ * H5Pget_attr_creation_order retrieves the settings for tracking and indexing attribute creation order on an object
+ *
+ * @param ocpl_id
+ * IN: Object (group or dataset) creation property list identifier
+ *
+ * @return Flags specifying whether to track and index attribute creation order
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native int H5Pget_attr_creation_order(long ocpl_id) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_attr_creation_order sets flags specifying whether to track and index attribute creation order on an
+ * object.
+ *
+ * @param ocpl_id
+ * IN: Object creation property list identifier
+ * @param crt_order_flags
+ * IN: Flags specifying whether to track and index attribute creation order
+ *
+ * @return Returns a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native int H5Pset_attr_creation_order(long ocpl_id, int crt_order_flags)
+ throws HDF5LibraryException;
+
+ /**
+ * H5Pget_obj_track_times queries the object creation property list, ocpl_id, to determine whether object times are
+ * being recorded.
+ *
+ * @param ocpl_id
+ * IN: Object creation property list identifier
+ *
+ * @return TRUE or FALSE, specifying whether object times are being recorded
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native boolean H5Pget_obj_track_times(long ocpl_id) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_obj_track_times sets a property in the object creation property list, ocpl_id, that governs the recording
+ * of times associated with an object.
+ *
+ * @param ocpl_id
+ * IN: Object creation property list identifier
+ *
+ * @param track_times
+ * IN: TRUE or FALSE, specifying whether object times are to be tracked
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native void H5Pset_obj_track_times(long ocpl_id, boolean track_times)
+ throws HDF5LibraryException;
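+
+    // Illustrative sketch: disabling object time tracking on a creation property
+    // list, e.g. to help produce bit-for-bit reproducible files. "ocpl_id" is a
+    // hypothetical identifier.
+    //
+    //     H5.H5Pset_obj_track_times(ocpl_id, false);
+    //     boolean tracked = H5.H5Pget_obj_track_times(ocpl_id); // false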
+
+ public synchronized static native int H5Pmodify_filter(long plist, long filter, int flags, long cd_nelmts,
+ int[] cd_values) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Pset_filter adds the specified filter and corresponding properties to the end of an output filter pipeline.
+ *
+ * @param plist
+ * IN: Property list identifier.
+ * @param filter
+ * IN: Filter to be added to the pipeline.
+ * @param flags
+ * IN: Bit vector specifying certain general properties of the filter.
+ * @param cd_nelmts
+ * IN: Number of elements in cd_values
+ * @param cd_values
+ * IN: Auxiliary data for the filter.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pset_filter(long plist, int filter, int flags, long cd_nelmts,
+ int[] cd_values) throws HDF5LibraryException;
+
+ /**
+ * H5Pget_nfilters returns the number of filters defined in the filter pipeline associated with the property list
+ * plist.
+ *
+ * @param plist
+ * IN: Property list identifier.
+ *
+ * @return the number of filters in the pipeline if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pget_nfilters(long plist) throws HDF5LibraryException;
+
+ /**
+ * H5Pget_filter returns information about a filter, specified by its filter number, in a filter pipeline, specified
+ * by the property list with which it is associated.
+ *
+ * @param plist
+ * IN: Property list identifier.
+ * @param filter_number
+ * IN: Sequence number within the filter pipeline of the filter for which information is sought.
+ * @param flags
+ * OUT: Bit vector specifying certain general properties of the filter.
+ * @param cd_nelmts
+ * IN/OUT: Number of elements in cd_values
+ * @param cd_values
+ * OUT: Auxiliary data for the filter.
+ * @param namelen
+ * IN: Anticipated number of characters in name.
+ * @param name
+ * OUT: Name of the filter.
+ * @param filter_config
+     *            OUT: A bit field encoding the returned filter information
+ *
+ * @return the filter identification number if successful. Otherwise returns H5Z_FILTER_ERROR (-1).
+ *
+ * @exception ArrayIndexOutOfBoundsException
+ * Fatal error on Copyback
+ * @exception ArrayStoreException
+ * Fatal error on Copyback
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name or an array is null.
+ *
+ **/
+ public static int H5Pget_filter(long plist, int filter_number, int[] flags, long[] cd_nelmts, int[] cd_values,
+ long namelen, String[] name, int[] filter_config) throws ArrayIndexOutOfBoundsException,
+ ArrayStoreException, HDF5LibraryException, NullPointerException {
+ return H5Pget_filter2(plist, filter_number, flags, cd_nelmts, cd_values, namelen, name, filter_config);
+ }
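+
+    // Illustrative sketch: walking the filter pipeline of a dataset creation
+    // property list "dcpl_id" (hypothetical) and printing each filter's name.
+    // The output-array sizes are arbitrary; error handling is omitted.
+    //
+    //     int nfilters = H5.H5Pget_nfilters(dcpl_id);
+    //     for (int i = 0; i < nfilters; i++) {
+    //         int[] flags = new int[1];
+    //         long[] cd_nelmts = { 4 };
+    //         int[] cd_values = new int[4];
+    //         String[] name = new String[1];
+    //         int[] filter_config = new int[1];
+    //         int filter = H5.H5Pget_filter(dcpl_id, i, flags, cd_nelmts, cd_values,
+    //                 120, name, filter_config);
+    //         System.out.println("filter " + filter + ": " + name[0]);
+    //     }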
+
+ /**
+ * H5Pget_filter2 returns information about a filter, specified by its filter number, in a filter pipeline,
+ * specified by the property list with which it is associated.
+ *
+ * @see public static int H5Pget_filter(int plist, int filter_number, int[] flags, int[] cd_nelmts, int[] cd_values,
+ * int namelen, String[] name, int[] filter_config)
+ *
+ **/
+ private synchronized static native int H5Pget_filter2(long plist, int filter_number, int[] flags, long[] cd_nelmts,
+ int[] cd_values, long namelen, String[] name, int[] filter_config) throws ArrayIndexOutOfBoundsException,
+ ArrayStoreException, HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Pget_filter_by_id returns information about the filter specified in filter_id, a filter identifier. plist_id
+ * must be a dataset or group creation property list and filter_id must be in the associated filter pipeline. The
+ * filter_id and flags parameters are used in the same manner as described in the discussion of H5Pset_filter. Aside
+ * from the fact that they are used for output, the parameters cd_nelmts and cd_values[] are used in the same manner
+ * as described in the discussion of H5Pset_filter. On input, the cd_nelmts parameter indicates the number of
+ * entries in the cd_values[] array allocated by the calling program; on exit it contains the number of values
+ * defined by the filter. On input, the namelen parameter indicates the number of characters allocated for the
+ * filter name by the calling program in the array name[]. On exit name[] contains the name of the filter with one
+ * character of the name in each element of the array. If the filter specified in filter_id is not set for the
+     * property list, an error will be returned and H5Pget_filter_by_id will fail.
+ *
+ * @param plist_id
+ * IN: Property list identifier.
+ * @param filter_id
+ * IN: Filter identifier.
+ * @param flags
+ * OUT: Bit vector specifying certain general properties of the filter.
+ * @param cd_nelmts
+     *            IN/OUT: Number of elements in cd_values
+ * @param cd_values
+ * OUT: Auxiliary data for the filter.
+ * @param namelen
+ * IN: Anticipated number of characters in name.
+ * @param name
+ * OUT: Name of the filter.
+ * @param filter_config
+ * OUT: A bit field encoding the returned filter information
+ *
+ * @return the filter identification number if successful. Otherwise returns H5Z_FILTER_ERROR (-1).
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception ArrayIndexOutOfBoundsException
+ * Fatal error on Copyback
+ * @exception ArrayStoreException
+ * Fatal error on Copyback
+ * @exception NullPointerException
+ * - name or an array is null.
+ *
+ **/
+ public static int H5Pget_filter_by_id(long plist_id, long filter_id, int[] flags, long[] cd_nelmts,
+ int[] cd_values, long namelen, String[] name, int[] filter_config) throws ArrayIndexOutOfBoundsException,
+ ArrayStoreException, HDF5LibraryException, NullPointerException {
+ return H5Pget_filter_by_id2(plist_id, filter_id, flags, cd_nelmts, cd_values, namelen, name, filter_config);
+ }
+
+ /**
+ * H5Pget_filter_by_id2 returns information about a filter, specified by its filter id, in a filter pipeline,
+ * specified by the property list with which it is associated.
+ *
+ * @param plist_id
+ * IN: Property list identifier.
+ * @param filter_id
+ * IN: Filter identifier.
+ * @param flags
+ * OUT: Bit vector specifying certain general properties of the filter.
+ * @param cd_nelmts
+     *            IN/OUT: Number of elements in cd_values
+ * @param cd_values
+ * OUT: Auxiliary data for the filter.
+ * @param namelen
+ * IN: Anticipated number of characters in name.
+ * @param name
+ * OUT: Name of the filter.
+ * @param filter_config
+ * OUT: A bit field encoding the returned filter information
+ *
+ * @return the filter identification number if successful. Otherwise returns H5Z_FILTER_ERROR (-1).
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name or an array is null.
+ *
+ **/
+ public synchronized static native int H5Pget_filter_by_id2(long plist_id, long filter_id, int[] flags,
+ long[] cd_nelmts, int[] cd_values, long namelen, String[] name, int[] filter_config)
+ throws HDF5LibraryException, NullPointerException;
+
+
+ public synchronized static native boolean H5Pall_filters_avail(long dcpl_id) throws HDF5LibraryException,
+ NullPointerException;
+
+ public synchronized static native int H5Premove_filter(long obj_id, long filter) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_deflate sets the compression method for a dataset.
+ *
+ * @param plist
+ * IN: Identifier for the dataset creation property list.
+ * @param level
+ * IN: Compression level.
+ *
+ * @return non-negative if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pset_deflate(long plist, int level) throws HDF5LibraryException;
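+
+    // Illustrative sketch: enabling gzip (deflate) compression at level 6 on a
+    // chunked dataset creation property list and verifying that every filter in
+    // the pipeline is available. "dcpl_id" is hypothetical; a chunked layout must
+    // also be set on it for the filter to take effect.
+    //
+    //     H5.H5Pset_deflate(dcpl_id, 6);
+    //     if (!H5.H5Pall_filters_avail(dcpl_id))
+    //         System.err.println("deflate filter not available in this build");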
+
+ public synchronized static native int H5Pset_fletcher32(long plist) throws HDF5LibraryException,
+ NullPointerException;
+
+ // File creation property list (FCPL) routines
+
+ /**
+ * H5Pget_userblock retrieves the size of a user block in a file creation property list.
+ *
+ * @param plist
+ * IN: Identifier for property list to query.
+ * @param size
+ * OUT: Pointer to location to return user-block size.
+ *
+ * @return a non-negative value and the size of the user block; if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - size is null.
+ **/
+ public synchronized static native int H5Pget_userblock(long plist, long[] size) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+ * H5Pset_userblock sets the user block size of a file creation property list.
+ *
+ * @param plist
+ * IN: Identifier of property list to modify.
+ * @param size
+ * IN: Size of the user-block in bytes.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pset_userblock(long plist, long size) throws HDF5LibraryException;
+
+ /**
+ * H5Pget_sizes retrieves the size of the offsets and lengths used in an HDF5 file. This function is only valid for
+ * file creation property lists.
+ *
+ * @param plist
+ * IN: Identifier of property list to query.
+ * @param size
+ * OUT: the size of the offsets and length.
+ *
+ * <pre>
+ * size[0] = sizeof_addr // offset size in bytes
+ * size[1] = sizeof_size // length size in bytes
+ * </pre>
+     * @return a non-negative value with the sizes initialized, if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - size is null.
+ * @exception IllegalArgumentException
+ * - size is invalid.
+ **/
+ public synchronized static native int H5Pget_sizes(long plist, long[] size) throws HDF5LibraryException,
+ NullPointerException, IllegalArgumentException;
+
+ /**
+ * H5Pset_sizes sets the byte size of the offsets and lengths used to address objects in an HDF5 file.
+ *
+ * @param plist
+ * IN: Identifier of property list to modify.
+ * @param sizeof_addr
+ * IN: Size of an object offset in bytes.
+ * @param sizeof_size
+ * IN: Size of an object length in bytes.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pset_sizes(long plist, int sizeof_addr, int sizeof_size)
+ throws HDF5LibraryException;
+
+ /**
+ * H5Pget_sym_k retrieves the size of the symbol table B-tree 1/2 rank and the symbol table leaf node 1/2 size.
+ *
+ * @param plist
+ * IN: Property list to query.
+ * @param size
+     *            OUT: the symbol table's B-tree 1/2 rank and leaf node 1/2 size.
+ *
+ * <pre>
+ * size[0] = ik // the symbol table's B-tree 1/2 rank
+ * size[1] = lk // leaf node 1/2 size
+ * </pre>
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - size is null.
+ * @exception IllegalArgumentException
+ * - size is invalid.
+ **/
+ public synchronized static native int H5Pget_sym_k(long plist, int[] size) throws HDF5LibraryException,
+ NullPointerException, IllegalArgumentException;
+
+ /**
+ * H5Pset_sym_k sets the size of parameters used to control the symbol table nodes.
+ *
+ * @param plist
+ * IN: Identifier for property list to query.
+ * @param ik
+ * IN: Symbol table tree rank.
+ * @param lk
+ * IN: Symbol table node size.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pset_sym_k(long plist, int ik, int lk) throws HDF5LibraryException;
+
+ /**
+ * H5Pget_istore_k queries the 1/2 rank of an indexed storage B-tree.
+ *
+ * @param plist
+ * IN: Identifier of property list to query.
+ * @param ik
+ * OUT: Pointer to location to return the chunked storage B-tree 1/2 rank.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - ik array is null.
+ **/
+ public synchronized static native int H5Pget_istore_k(long plist, int[] ik) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+ * H5Pset_istore_k sets the size of the parameter used to control the B-trees for indexing chunked datasets.
+ *
+ * @param plist
+ * IN: Identifier of property list to query.
+ * @param ik
+ * IN: 1/2 rank of chunked storage B-tree.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pset_istore_k(long plist, int ik) throws HDF5LibraryException;
+
+ /**
+ * H5Pget_shared_mesg_nindexes retrieves number of shared object header message indexes in file creation property
+ * list.
+ *
+ * @param fcpl_id
+     *            IN: File creation property list identifier
+ *
+ * @return nindexes, the number of shared object header message indexes available in files created with this
+ * property list
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native int H5Pget_shared_mesg_nindexes(long fcpl_id) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_shared_mesg_nindexes sets the number of shared object header message indexes in the specified file
+ * creation property list.
+ *
+ * @param plist_id
+ * IN: File creation property list
+ * @param nindexes
+ * IN: Number of shared object header message indexes to be available in files created with this property
+ * list
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - Invalid value of nindexes
+ *
+ **/
+ public synchronized static native int H5Pset_shared_mesg_nindexes(long plist_id, int nindexes)
+ throws HDF5LibraryException, IllegalArgumentException;
+
+ /**
+ * H5Pget_shared_mesg_index Retrieves the configuration settings for a shared message index.
+ *
+ * @param fcpl_id
+ * IN: File creation property list identifier
+ * @param index_num
+ * IN: Index being configured.
+ * @param mesg_info
+ * The message type and minimum message size
+ *
+ * <pre>
+ * mesg_info[0] = Types of messages that may be stored in this index.
+ * mesg_info[1] = Minimum message size.
+ * </pre>
+ *
+ * @return Returns a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - mesg_info is null.
+ * @exception IllegalArgumentException
+     *                - Invalid value of index_num
+ *
+ **/
+ public synchronized static native int H5Pget_shared_mesg_index(long fcpl_id, int index_num, int[] mesg_info)
+ throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+ /**
+ * H5Pset_shared_mesg_index Configures the specified shared object header message index
+ *
+ * @param fcpl_id
+ * IN: File creation property list identifier.
+ * @param index_num
+ * IN: Index being configured.
+ * @param mesg_type_flags
+ * IN: Types of messages that should be stored in this index.
+ * @param min_mesg_size
+ * IN: Minimum message size.
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+     *                - Invalid value of index_num
+ *
+ **/
+ public synchronized static native int H5Pset_shared_mesg_index(long fcpl_id, int index_num, int mesg_type_flags,
+ int min_mesg_size) throws HDF5LibraryException, IllegalArgumentException;
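+
+    // Illustrative sketch: configuring one shared-message index on a file creation
+    // property list so that datatype messages of at least 40 bytes are shared.
+    // "fcpl_id" is hypothetical; H5O_SHMESG_DTYPE_FLAG is assumed to be mirrored
+    // from the C API in HDF5Constants.
+    //
+    //     H5.H5Pset_shared_mesg_nindexes(fcpl_id, 1);
+    //     H5.H5Pset_shared_mesg_index(fcpl_id, 0, HDF5Constants.H5O_SHMESG_DTYPE_FLAG, 40);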
+
+ /**
+ * H5Pget_shared_mesg_phase_change retrieves shared object header message phase change information.
+ *
+ * @param fcpl_id
+     *            IN: File creation property list identifier
+ * @param size
+ * The threshold values for storage of shared object header message indexes in a file.
+ *
+ * <pre>
+ * size[0] = Threshold above which storage of a shared object header message index shifts from list to B-tree
+ * size[1] = Threshold below which storage of a shared object header message index reverts to list format
+ * </pre>
+ *
+ * @return Returns a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - size is null.
+ *
+ **/
+ public synchronized static native int H5Pget_shared_mesg_phase_change(long fcpl_id, int[] size)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Pset_shared_mesg_phase_change sets shared object header message storage phase change thresholds.
+ *
+ * @param fcpl_id
+ * IN: File creation property list identifier
+ * @param max_list
+ * IN: Threshold above which storage of a shared object header message index shifts from list to B-tree
+ * @param min_btree
+ * IN: Threshold below which storage of a shared object header message index reverts to list format
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - Invalid values of max_list and min_btree.
+ *
+ **/
+ public synchronized static native int H5Pset_shared_mesg_phase_change(long fcpl_id, int max_list, int min_btree)
+ throws HDF5LibraryException, IllegalArgumentException;
+
+ /**
+ * H5Pset_file_space sets the file space management strategy for the file associated with fcpl_id to strategy.
+ * There are four strategies that applications can select and they are described in the Parameters section.
+ *
+ * @param fcpl_id
+ * IN: File creation property list identifier
+ * @param strategy
+ * IN: The strategy for file space management.
+ * Passing a value of zero (0) indicates that the value of strategy is not to be modified.
+ * H5F_FILE_SPACE_ALL_PERSIST
+ * With this strategy, the free-space managers track the free space that results from the
+ * manipulation of HDF5 objects in the HDF5 file. The free space information is saved when the
+ * file is closed, and reloaded when the file is reopened. When space is needed for file metadata
+ * or raw data, the HDF5 library first requests space from the library's free-space managers.
+ * If the request is not satisfied, the library requests space from the aggregators. If the request
+ * is still not satisfied, the library requests space from the virtual file driver. That is, the
+ * library will use all of the mechanisms for allocating space.
+ * H5F_FILE_SPACE_ALL (Default file space management strategy)
+ * With this strategy, the free-space managers track the free space that results from the manipulation
+ * of HDF5 objects in the HDF5 file. The free space information is NOT saved when the file is closed
+ * and the free space that exists upon file closing becomes unaccounted space in the file.
+ * Like the previous strategy, the library will try all of the mechanisms for allocating space. When
+ * space is needed for file metadata or raw data, the library first requests space from the free-space
+ * managers. If the request is not satisfied, the library requests space from the aggregators. If the
+ * request is still not satisfied, the library requests space from the virtual file driver.
+ * H5F_FILE_SPACE_AGGR_VFD
+ * With this strategy, the library does not track free space that results from the manipulation of HDF5
+     *            objects in the HDF5 file and the free space becomes unaccounted space in the file.
+ * When space is needed for file metadata or raw data, the library first requests space from the
+ * aggregators. If the request is not satisfied, the library requests space from the virtual file driver.
+ * H5F_FILE_SPACE_VFD
+ * With this strategy, the library does not track free space that results from the manipulation of HDF5
+     *            objects in the HDF5 file and the free space becomes unaccounted space in the file.
+ * When space is needed for file metadata or raw data, the library requests space from the virtual file driver.
+ * @param threshold
+ * IN: The free-space section threshold. The library default is 1, which is to track all free-space sections.
+ * Passing a value of zero (0) indicates that the value of threshold is not to be modified.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+     *                - Invalid values of strategy and threshold.
+ *
+ **/
+ public synchronized static native void H5Pset_file_space(long fcpl_id, int strategy, long threshold)
+ throws HDF5LibraryException, IllegalArgumentException;
+
+ /**
+ * H5Pget_file_space provides the means for applications to manage the HDF5 file's file space for their specific needs.
+ *
+ * @param fcpl_id
+ * IN: File creation property list identifier
+ * @param strategy
+     *            IN/OUT: The current file space management strategy in use for the file; if null, the strategy is not queried.
+     * @param threshold
+     *            IN/OUT: The current free-space section threshold; if null, the threshold is not queried.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+     *                - Invalid strategy or threshold argument.
+ *
+ **/
+ public synchronized static native void H5Pget_file_space(long fcpl_id, int[] strategy, long[] threshold)
+ throws HDF5LibraryException, IllegalArgumentException;
+
+ // File access property list (FAPL) routines
+
+ /**
+ * H5Pget_alignment retrieves the current settings for alignment properties from a file access property list.
+ *
+ * @param plist
+ * IN: Identifier of a file access property list.
+ * @param alignment
+ * OUT: threshold value and alignment value.
+ *
+ * <pre>
+ * alignment[0] = threshold // threshold value
+ * alignment[1] = alignment // alignment value
+ * </pre>
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+     *                - alignment array is null.
+     * @exception IllegalArgumentException
+     *                - alignment array is invalid.
+ **/
+ public synchronized static native int H5Pget_alignment(long plist, long[] alignment) throws HDF5LibraryException,
+ NullPointerException, IllegalArgumentException;
+
+ /**
+ * H5Pset_alignment sets the alignment properties of a file access property list so that any file object &gt;=
+ * THRESHOLD bytes will be aligned on an address which is a multiple of ALIGNMENT.
+ *
+ * @param plist
+ * IN: Identifier for a file access property list.
+ * @param threshold
+ * IN: Threshold value.
+ * @param alignment
+ * IN: Alignment value.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pset_alignment(long plist, long threshold, long alignment)
+ throws HDF5LibraryException;
+
+ /**
+ * H5Pget_driver returns the identifier of the low-level file driver associated with the file access property list
+ * or data transfer property list plid.
+ *
+ * @param plid
+ * IN: File access or data transfer property list identifier.
+ * @return a valid low-level driver identifier if successful; a negative value if failed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ */
+ public synchronized static native long H5Pget_driver(long plid) throws HDF5LibraryException;
+
+ public synchronized static native long H5Pget_family_offset(long fapl_id) throws HDF5LibraryException,
+ NullPointerException;
+
+ public synchronized static native int H5Pset_family_offset(long fapl_id, long offset) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+     * Retrieves the maximum possible number of elements in the metadata cache and in the raw data chunk cache,
+     * the maximum possible total size of the raw data chunk cache in bytes, and its preemption policy value (RDCC_W0).
+ *
+ * @param plist
+ * IN: Identifier of the file access property list.
+ * @param mdc_nelmts
+ * IN/OUT: No longer used, will be ignored.
+ * @param rdcc_nelmts
+ * IN/OUT: Number of elements (objects) in the raw data chunk cache.
+ * @param rdcc_nbytes
+ * IN/OUT: Total size of the raw data chunk cache, in bytes.
+ * @param rdcc_w0
+ * IN/OUT: Preemption policy.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - an array is null.
+ **/
+ public synchronized static native int H5Pget_cache(long plist, int[] mdc_nelmts, long[] rdcc_nelmts,
+ long[] rdcc_nbytes, double[] rdcc_w0) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Pset_cache sets the number of elements (objects) in the meta data cache and the total number of bytes in the
+ * raw data chunk cache.
+ *
+ * @param plist
+ * IN: Identifier of the file access property list.
+ * @param mdc_nelmts
+ * IN: No longer used, will be ignored.
+ * @param rdcc_nelmts
+ * IN: Number of elements (objects) in the raw data chunk cache.
+ * @param rdcc_nbytes
+ * IN: Total size of the raw data chunk cache, in bytes.
+ * @param rdcc_w0
+ * IN: Preemption policy.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pset_cache(long plist, int mdc_nelmts, long rdcc_nelmts, long rdcc_nbytes,
+ double rdcc_w0) throws HDF5LibraryException;
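+
+    // Illustrative sketch: enlarging the raw data chunk cache on a file access
+    // property list to 521 slots and 16 MiB with a preemption policy of 0.75.
+    // The mdc_nelmts argument is ignored by the library. "fapl_id" is hypothetical.
+    //
+    //     H5.H5Pset_cache(fapl_id, 0, 521, 16 * 1024 * 1024, 0.75);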
+
+ /**
+ * H5Pget_mdc_config gets the initial metadata cache configuration contained in a file access property list and
+ * loads it into the instance of H5AC_cache_config_t pointed to by the config_ptr parameter. This configuration is
+ * used when the file is opened.
+ *
+ * @param plist_id
+ * IN: Identifier of the file access property list.
+ *
+ * @return A buffer(H5AC_cache_config_t) for the current metadata cache configuration information
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native H5AC_cache_config_t H5Pget_mdc_config(long plist_id) throws HDF5LibraryException;
+
+ public synchronized static native void H5Pset_mdc_config(long plist_id, H5AC_cache_config_t config_ptr)
+ throws HDF5LibraryException;
+
+ /**
+     * H5Pget_gc_references returns the current setting for the garbage collection references property from a file access
+ * property list.
+ *
+ * @param fapl_id
+ * IN File access property list
+ *
+ * @return GC is on (true) or off (false)
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native boolean H5Pget_gc_references(long fapl_id) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_gc_references Sets the flag for garbage collecting references for the file. Default value for garbage
+ * collecting references is off.
+ *
+ * @param fapl_id
+ * IN File access property list
+ * @param gc_ref
+ * IN set GC on (true) or off (false)
+ *
+ * @return non-negative if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pset_gc_references(long fapl_id, boolean gc_ref)
+ throws HDF5LibraryException;
+
+ public synchronized static native int H5Pget_fclose_degree(long plist_id) throws HDF5LibraryException,
+ NullPointerException;
+
+ public synchronized static native int H5Pset_fclose_degree(long plist, int degree) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+     * H5Pget_meta_block_size retrieves the current metadata block size setting.
+ *
+ * @param fapl_id
+ * IN: File access property list identifier
+ *
+ * @return the minimum size, in bytes, of metadata block allocations.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native long H5Pget_meta_block_size(long fapl_id) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_meta_block_size sets the minimum metadata block size.
+ *
+ * @param fapl_id
+ * IN: File access property list identifier
+ * @param size
+ * IN: Minimum size, in bytes, of metadata block allocations.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native void H5Pset_meta_block_size(long fapl_id, long size) throws HDF5LibraryException;
+
+ public synchronized static native long H5Pget_sieve_buf_size(long fapl_id) throws HDF5LibraryException;
+
+ public synchronized static native void H5Pset_sieve_buf_size(long fapl_id, long size) throws HDF5LibraryException;
+
+ /**
+     * H5Pget_small_data_block_size retrieves the current small data block size from a file access property list.
+ *
+ * @param plist
+ * IN: Identifier for property list to query.
+ *
+     * @return the size of the small data block, in bytes, if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Pget_small_data_block_size(long plist) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_small_data_block_size reserves blocks of size bytes for the contiguous storage of the raw data portion of
+ * small datasets.
+ *
+ * @param plist
+ * IN: Identifier of property list to modify.
+ * @param size
+ * IN: Size of the blocks in bytes.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pset_small_data_block_size(long plist, long size)
+ throws HDF5LibraryException;
+
+ /**
+ * H5Pget_libver_bounds retrieves the lower and upper bounds on the HDF5 Library versions that indirectly determine
+     * the object format versions used when creating objects in the file.
+ *
+ * @param fapl_id
+ * IN: File access property list identifier
+ * @param libver
+ * The earliest/latest version of the library that will be used for writing objects.
+ *
+ * <pre>
+ * libver[0] = The earliest version of the library that will be used for writing objects
+ * libver[1] = The latest version of the library that will be used for writing objects.
+ * </pre>
+ *
+ * @return Returns a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+     *                - libver is null.
+ *
+ **/
+ public synchronized static native int H5Pget_libver_bounds(long fapl_id, int[] libver) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+ * H5Pset_libver_bounds Sets bounds on library versions, and indirectly format versions, to be used when creating
+ * objects
+ *
+ * @param fapl_id
+ * IN: File access property list identifier
+ * @param low
+ * IN: The earliest version of the library that will be used for writing objects
+ * @param high
+ * IN: The latest version of the library that will be used for writing objects.
+ *
+ *
+ * @return Returns a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - Argument is Illegal
+ *
+ **/
+ public synchronized static native int H5Pset_libver_bounds(long fapl_id, int low, int high)
+ throws HDF5LibraryException, IllegalArgumentException;
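+
+    // Illustrative sketch: restricting a file access property list to the latest
+    // object format versions, then reading the bounds back. "fapl_id" is hypothetical.
+    //
+    //     H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST,
+    //             HDF5Constants.H5F_LIBVER_LATEST);
+    //     int[] libver = new int[2];
+    //     H5.H5Pget_libver_bounds(fapl_id, libver);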
+
+ /**
+ * H5Pget_elink_file_cache_size retrieves the size of the external link open file cache.
+ *
+ * @param fapl_id
+ * IN: File access property list identifier
+ *
+ * @return External link open file cache size in number of files.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native int H5Pget_elink_file_cache_size(long fapl_id) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_elink_file_cache_size sets the number of files that can be held open in an external link open file cache.
+ *
+ * @param fapl_id
+ * IN: File access property list identifier
+ * @param efc_size
+ * IN: External link open file cache size in number of files.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native void H5Pset_elink_file_cache_size(long fapl_id, int efc_size)
+ throws HDF5LibraryException;
+
+ // Dataset creation property list (DCPL) routines //
+
+ /**
+ * H5Pget_layout returns the layout of the raw data for a dataset.
+ *
+ * @param plist
+ * IN: Identifier for property list to query.
+ *
+ * @return the layout type of a dataset creation property list if successful. Otherwise returns H5D_LAYOUT_ERROR
+ * (-1).
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pget_layout(long plist) throws HDF5LibraryException;
+
+ /**
+     * H5Pset_layout sets the type of storage used to store the raw data for a dataset.
+ *
+ * @param plist
+ * IN: Identifier of property list to query.
+ * @param layout
+ * IN: Type of storage layout for raw data.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pset_layout(long plist, int layout) throws HDF5LibraryException;
+
+ /**
+ * H5Pget_chunk retrieves the size of chunks for the raw data of a chunked layout dataset.
+ *
+ * @param plist
+ * IN: Identifier of property list to query.
+ * @param max_ndims
+ * IN: Size of the dims array.
+ * @param dims
+ * OUT: Array to store the chunk dimensions.
+ *
+     * @return chunk dimensionality if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - dims array is null.
+ * @exception IllegalArgumentException
+ * - max_ndims &lt;=0
+ **/
+ public synchronized static native int H5Pget_chunk(long plist, int max_ndims, long[] dims)
+ throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+ /**
+ * H5Pset_chunk sets the size of the chunks used to store a chunked layout dataset.
+ *
+ * @param plist
+ * IN: Identifier for property list to query.
+ * @param ndims
+ * IN: The number of dimensions of each chunk.
+ * @param dim
+ * IN: An array containing the size of each chunk.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - dims array is null.
+ * @exception IllegalArgumentException
+ * - dims &lt;=0
+ **/
+ public synchronized static native int H5Pset_chunk(long plist, int ndims, byte[] dim) throws HDF5LibraryException,
+ NullPointerException, IllegalArgumentException;
+
+    public synchronized static int H5Pset_chunk(long plist, int ndims, long[] dim) throws HDF5Exception,
+            NullPointerException, IllegalArgumentException {
+        if (dim == null) {
+            return -1;
+        }
+
+        byte[] thedims = new HDFArray(dim).byteify();
+
+        return H5Pset_chunk(plist, ndims, thedims);
+    }
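+
+    // Illustrative sketch: selecting a chunked layout with 64 x 64 chunks for a
+    // 2-dimensional dataset. "dcpl_id" is hypothetical; error handling is omitted.
+    //
+    //     long[] chunk_dims = { 64, 64 };
+    //     H5.H5Pset_chunk(dcpl_id, 2, chunk_dims);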
+
+ /**
+ * H5Pset_virtual maps elements of the virtual dataset (VDS) described by the
+ * virtual dataspace identifier vspace_id to the elements of the source dataset
+ * described by the source dataset dataspace identifier src_space_id. The source
+ * dataset is identified by the name of the file where it is located, src_file_name,
+ * and the name of the dataset, src_dset_name.
+ *
+ * @param dcpl_id
+ * IN: The identifier of the dataset creation property list that will be used when creating the virtual dataset.
+ * @param vspace_id
+ * IN: The dataspace identifier with the selection within the virtual dataset applied, possibly an unlimited selection.
+ * @param src_file_name
+ * IN: The name of the HDF5 file where the source dataset is located. The file might not exist yet. The name can be specified using a C-style printf statement.
+ * @param src_dset_name
+ * IN: The path to the HDF5 dataset in the file specified by src_file_name. The dataset might not exist yet. The dataset name can be specified using a C-style printf statement.
+ * @param src_space_id
+ * IN: The source dataset dataspace identifier with a selection applied, possibly an unlimited selection.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+     *                - a name string is null.
+ * @exception IllegalArgumentException
+ * - An id is &lt;=0
+ **/
+ public synchronized static native void H5Pset_virtual(long dcpl_id, long vspace_id, String src_file_name, String src_dset_name, long src_space_id) throws HDF5LibraryException,
+ NullPointerException, IllegalArgumentException;
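+
+    // Illustrative sketch: mapping the whole extent of a source dataset "/A" in
+    // file "a.h5" onto the whole extent of a 100-element virtual dataset. The
+    // identifiers and names here are hypothetical; error handling is omitted.
+    //
+    //     long[] dims = { 100 };
+    //     long vspace_id = H5.H5Screate_simple(1, dims, null);
+    //     long src_space_id = H5.H5Screate_simple(1, dims, null);
+    //     H5.H5Pset_virtual(dcpl_id, vspace_id, "a.h5", "/A", src_space_id);
+    //     H5.H5Sclose(src_space_id);
+    //     H5.H5Sclose(vspace_id);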
+
+ /**
+ * H5Pget_virtual_count gets the number of mappings for a virtual dataset that has the creation property list specified by dcpl_id.
+ *
+ * @param dcpl_id
+ * IN: The identifier of the virtual dataset creation property list.
+ *
+ * @return a non-negative number of mappings if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - An id is &lt;=0
+ **/
+ public synchronized static native long H5Pget_virtual_count(long dcpl_id) throws HDF5LibraryException, IllegalArgumentException;
+
+ /**
+ * H5Pget_virtual_vspace takes the dataset creation property list for the virtual dataset, dcpl_id, and the mapping index, index,
+ * and returns a dataspace identifier for the selection within the virtual dataset used in the mapping.
+ *
+ * @param dcpl_id
+ * IN: The identifier of the virtual dataset creation property list.
+ * @param index
+ * IN: Mapping index.
+ *
+ * @return a valid dataspace identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - An id is &lt;=0
+ **/
+ public synchronized static native long H5Pget_virtual_vspace(long dcpl_id, long index) throws HDF5LibraryException, IllegalArgumentException;
+
+ /**
+ * H5Pget_virtual_srcspace takes the dataset creation property list for the virtual dataset, dcpl_id, and the mapping index, index,
+ * and returns a dataspace identifier for the selection within the source dataset used in the mapping.
+ *
+ * @param dcpl_id
+ * IN: The identifier of the virtual dataset creation property list.
+ * @param index
+ * IN: Mapping index.
+ *
+ * @return a valid dataspace identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - An id is &lt;=0
+ **/
+ public synchronized static native long H5Pget_virtual_srcspace(long dcpl_id, long index) throws HDF5LibraryException, IllegalArgumentException;
+
+ /**
+ * H5Pget_virtual_filename takes the dataset creation property list for the virtual dataset, dcpl_id, the mapping index, index,
+ * the size of the filename for a source dataset, size, and retrieves the name of the file for a source dataset used in the mapping.
+ *
+ * @param dcpl_id
+ * IN: The identifier of the virtual dataset creation property list.
+ * @param index
+ * IN: Mapping index.
+ *
+ * @return the name of the file containing the source dataset if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - An id is &lt;=0
+ **/
+ public synchronized static native String H5Pget_virtual_filename(long dcpl_id, long index) throws HDF5LibraryException, IllegalArgumentException;
+
+ /**
+ * H5Pget_virtual_dsetname takes the dataset creation property list for the virtual dataset, dcpl_id, the mapping index, index, the
+ * size of the dataset name for a source dataset, size, and retrieves the name of the source dataset used in the mapping.
+ *
+ * @param dcpl_id
+ * IN: The identifier of the virtual dataset creation property list.
+ * @param index
+ * IN: Mapping index.
+ *
+ * @return the name of the source dataset if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - An id is &lt;=0
+ **/
+ public synchronized static native String H5Pget_virtual_dsetname(long dcpl_id, long index) throws HDF5LibraryException, IllegalArgumentException;
+
+ /**
+ * H5Pget_external returns information about an external file.
+ *
+ * @param plist
+ * IN: Identifier of a dataset creation property list.
+ * @param idx
+ * IN: External file index.
+ * @param name_size
+ * IN: Maximum length of name array.
+ * @param name
+ * OUT: Name of the external file.
+ * @param size
+ * OUT: the offset value and the size of the external file data.
+ *
+ * <pre>
+ * size[0] = offset // a location to return an offset value
+ * size[1] = size // a location to return the size of
+ * // the external file data.
+ * </pre>
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception ArrayIndexOutOfBoundsException
+ * Fatal error on Copyback
+ * @exception ArrayStoreException
+ * Fatal error on Copyback
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name or size is null.
+ * @exception IllegalArgumentException
+ * - name_size &lt;= 0 .
+ *
+ **/
+ public synchronized static native int H5Pget_external(long plist, int idx, long name_size, String[] name,
+ long[] size) throws ArrayIndexOutOfBoundsException, ArrayStoreException, HDF5LibraryException,
+ NullPointerException, IllegalArgumentException;
+
+ /**
+ * H5Pset_external adds an external file to the list of external files.
+ *
+ * @param plist
+ * IN: Identifier of a dataset creation property list.
+ * @param name
+ * IN: Name of an external file.
+ * @param offset
+ * IN: Offset, in bytes, from the beginning of the file to the location in the file where the data
+ * starts.
+ * @param size
+ * IN: Number of bytes reserved in the file for the data.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native int H5Pset_external(long plist, String name, long offset, long size)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Pget_external_count returns the number of external files for the specified dataset.
+ *
+ * @param plist
+ * IN: Identifier of a dataset creation property list.
+ *
+ * @return the number of external files if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pget_external_count(long plist) throws HDF5LibraryException;
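+
+ /*
+ * Illustrative sketch (not part of the original source): registering an external raw-data file on
+ * a dataset creation property list and reading the entry back. The identifiers and the file name
+ * "raw.data" are assumptions for the example only.
+ *
+ * long dcplId = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ * H5.H5Pset_external(dcplId, "raw.data", 0, 4096);
+ * int count = H5.H5Pget_external_count(dcplId); // expected: 1
+ * String[] name = new String[1];
+ * long[] offsetAndSize = new long[2];
+ * H5.H5Pget_external(dcplId, 0, 256, name, offsetAndSize);
+ * H5.H5Pclose(dcplId);
+ */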
+
+ public synchronized static native int H5Pset_szip(long plist, int options_mask, int pixels_per_block)
+ throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Pset_shuffle(long plist_id) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+ * H5Pset_nbit Sets up the use of the N-Bit filter.
+ *
+ * @param plist_id
+ * IN: Dataset creation property list identifier.
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native int H5Pset_nbit(long plist_id) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_scaleoffset sets the Scale-Offset filter for a dataset.
+ *
+ * @param plist_id
+ * IN: Dataset creation property list identifier.
+ * @param scale_type
+ * IN: Flag indicating compression method.
+ * @param scale_factor
+ * IN: Parameter related to scale.
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - Invalid arguments
+ *
+ **/
+ public synchronized static native int H5Pset_scaleoffset(long plist_id, int scale_type, int scale_factor)
+ throws HDF5LibraryException, IllegalArgumentException;
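+
+ /*
+ * Illustrative sketch (not part of the original source): enabling the Scale-Offset filter on a
+ * chunked integer dataset creation property list. A scale_factor of 0 asks the library to compute
+ * the minimum number of bits itself; HDF5Constants.H5Z_SO_INT selects integer scaling. The chunk
+ * dimensions are arbitrary for the example.
+ *
+ * long dcplId = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ * H5.H5Pset_chunk(dcplId, 2, new long[] { 32, 64 });
+ * H5.H5Pset_scaleoffset(dcplId, HDF5Constants.H5Z_SO_INT, 0);
+ * H5.H5Pclose(dcplId);
+ */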
+
+ /**
+ * H5Pget_fill_value queries the fill value property of a dataset creation property list.
+ *
+ * @param plist_id
+ * IN: Property list identifier.
+ * @param type_id
+ * IN: The datatype identifier of value.
+ * @param value
+ * IN: The fill value.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5Exception
+ * - Error converting data array.
+ **/
+ public synchronized static native int H5Pget_fill_value(long plist_id, long type_id, byte[] value)
+ throws HDF5Exception;
+
+ /**
+ * H5Pget_fill_value queries the fill value property of a dataset creation property list.
+ *
+ * @param plist_id
+ * IN: Property list identifier.
+ * @param type_id
+ * IN: The datatype identifier of value.
+ * @param obj
+ * IN: The fill value.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5Exception
+ * - Error converting data array.
+ **/
+ public synchronized static int H5Pget_fill_value(long plist_id, long type_id, Object obj) throws HDF5Exception {
+ HDFArray theArray = new HDFArray(obj);
+ byte[] buf = theArray.emptyBytes();
+
+ int status = H5Pget_fill_value(plist_id, type_id, buf);
+ if (status >= 0) {
+ obj = theArray.arrayify(buf);
+ }
+
+ return status;
+ }
+
+ /**
+ * H5Pset_fill_value sets the fill value for a dataset creation property list.
+ *
+ * @param plist_id
+ * IN: Property list identifier.
+ * @param type_id
+ * IN: The datatype identifier of value.
+ * @param value
+ * IN: The fill value.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5Exception
+ * - Error converting data array
+ **/
+ public synchronized static native int H5Pset_fill_value(long plist_id, long type_id, byte[] value)
+ throws HDF5Exception;
+
+ /**
+ * H5Pset_fill_value sets the fill value for a dataset creation property list.
+ *
+ * @param plist_id
+ * IN: Property list identifier.
+ * @param type_id
+ * IN: The datatype identifier of value.
+ * @param obj
+ * IN: The fill value.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5Exception
+ * - Error converting data array
+ **/
+ public synchronized static int H5Pset_fill_value(long plist_id, long type_id, Object obj) throws HDF5Exception {
+ HDFArray theArray = new HDFArray(obj);
+ byte[] buf = theArray.byteify();
+
+ int retVal = H5Pset_fill_value(plist_id, type_id, buf);
+
+ buf = null;
+ theArray = null;
+ return retVal;
+ }
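+
+ /*
+ * Illustrative sketch (not part of the original source): setting an integer fill value of -1 on a
+ * dataset creation property list through the Object convenience wrapper above, then checking that
+ * a fill value has been defined. Identifiers are assumptions for the example only.
+ *
+ * long dcplId = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ * int[] fill = { -1 };
+ * H5.H5Pset_fill_value(dcplId, HDF5Constants.H5T_NATIVE_INT, fill);
+ * int[] defined = new int[1];
+ * H5.H5Pfill_value_defined(dcplId, defined);
+ * H5.H5Pclose(dcplId);
+ */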
+
+ public synchronized static native int H5Pfill_value_defined(long plist_id, int[] status)
+ throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Pget_alloc_time(long plist_id, int[] alloc_time)
+ throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Pset_alloc_time(long plist_id, int alloc_time) throws HDF5LibraryException,
+ NullPointerException;
+
+ public synchronized static native int H5Pget_fill_time(long plist_id, int[] fill_time) throws HDF5LibraryException,
+ NullPointerException;
+
+ public synchronized static native int H5Pset_fill_time(long plist_id, int fill_time) throws HDF5LibraryException,
+ NullPointerException;
+
+ // Dataset access property list (DAPL) routines //
+
+ /**
+ * H5Pget_chunk_cache retrieves the maximum possible number of elements (chunk slots), the maximum possible number of bytes,
+ * and the preemption policy value, RDCC_W0, of the raw data chunk cache on a per-dataset basis.
+ *
+ * @param dapl_id
+ * IN: Identifier of the dataset access property list.
+ * @param rdcc_nslots
+ * IN/OUT: Number of elements (objects) in the raw data chunk cache.
+ * @param rdcc_nbytes
+ * IN/OUT: Total size of the raw data chunk cache, in bytes.
+ * @param rdcc_w0
+ * IN/OUT: Preemption policy.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - an array is null.
+ **/
+ public synchronized static native void H5Pget_chunk_cache(long dapl_id, long[] rdcc_nslots, long[] rdcc_nbytes,
+ double[] rdcc_w0) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Pset_chunk_cache sets the number of elements (chunk slots), the total number of bytes, and the preemption policy of
+ * the raw data chunk cache on a per-dataset basis.
+ *
+ * @param dapl_id
+ * IN: Identifier of the dataset access property list.
+ * @param rdcc_nslots
+ * IN: Number of elements (objects) in the raw data chunk cache.
+ * @param rdcc_nbytes
+ * IN: Total size of the raw data chunk cache, in bytes.
+ * @param rdcc_w0
+ * IN: Preemption policy.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Pset_chunk_cache(long dapl_id, long rdcc_nslots, long rdcc_nbytes,
+ double rdcc_w0) throws HDF5LibraryException;
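+
+ /*
+ * Illustrative sketch (not part of the original source): enlarging the per-dataset raw data chunk
+ * cache to 521 slots and 16 MiB with a 0.75 preemption policy, then reading the settings back.
+ * The dataset access property list identifier is an assumption for the example only.
+ *
+ * long daplId = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
+ * H5.H5Pset_chunk_cache(daplId, 521, 16 * 1024 * 1024, 0.75);
+ * long[] nslots = new long[1];
+ * long[] nbytes = new long[1];
+ * double[] w0 = new double[1];
+ * H5.H5Pget_chunk_cache(daplId, nslots, nbytes, w0);
+ * H5.H5Pclose(daplId);
+ */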
+
+ /**
+ * H5Pset_virtual_view takes the access property list for the virtual dataset, dapl_id, and the flag,
+ * view, and sets the VDS view according to the flag value.
+ *
+ * @param dapl_id
+ * IN: Dataset access property list identifier for the virtual dataset
+ * @param view
+ * IN: Flag specifying the extent of the data to be included in the view.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library
+ **/
+ public synchronized static native void H5Pset_virtual_view(long dapl_id, int view) throws HDF5LibraryException;
+
+ /**
+ * H5Pget_virtual_view takes the virtual dataset access property list, dapl_id, and retrieves the flag,
+ * view, set by the H5Pset_virtual_view call.
+ *
+ * @param dapl_id
+ * IN: Dataset access property list identifier for the virtual dataset
+ *
+ * @return The flag specifying the view of the virtual dataset.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library
+ **/
+ public synchronized static native int H5Pget_virtual_view(long dapl_id) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_virtual_printf_gap sets the access property list for the virtual dataset, dapl_id, to instruct the
+ * library to stop looking for the mapped data stored in the files and/or datasets with the printf-style names
+ * after not finding gap_size files and/or datasets. The found source files and datasets will determine the
+ * extent of the unlimited virtual dataset with the printf-style mappings.
+ *
+ * @param dapl_id
+ * IN: Dataset access property list identifier for the virtual dataset
+ * @param gap_size
+ * IN: Maximum number of files and/or datasets allowed to be missing for determining
+ * the extent of an unlimited virtual dataset with printf-style mappings.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library
+ **/
+ public synchronized static native void H5Pset_virtual_printf_gap(long dapl_id, long gap_size) throws HDF5LibraryException;
+
+ /**
+ * H5Pget_virtual_printf_gap returns the maximum number of missing printf-style files and/or datasets for
+ * determining the extent of an unlimited virtual dataset, gap_size, using the access property list for
+ * the virtual dataset, dapl_id.
+ *
+ * @param dapl_id
+ * IN: Dataset access property list identifier for the virtual dataset
+ *
+ * @return Maximum number of files and/or datasets allowed to be missing for determining
+ * the extent of an unlimited virtual dataset with printf-style mappings.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library
+ **/
+ public synchronized static native long H5Pget_virtual_printf_gap(long dapl_id) throws HDF5LibraryException;
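+
+ /*
+ * Illustrative sketch (not part of the original source): configuring how a virtual dataset with
+ * printf-style, unlimited mappings is viewed. The view flag constants are assumed to be exposed
+ * through HDF5Constants in this binding; daplId is an assumption for the example only.
+ *
+ * long daplId = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
+ * H5.H5Pset_virtual_view(daplId, HDF5Constants.H5D_VDS_FIRST_MISSING);
+ * H5.H5Pset_virtual_printf_gap(daplId, 2); // tolerate up to 2 missing source files/datasets
+ * int view = H5.H5Pget_virtual_view(daplId);
+ * long gap = H5.H5Pget_virtual_printf_gap(daplId);
+ * H5.H5Pclose(daplId);
+ */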
+
+ // Dataset xfer property list (DXPL) routines //
+
+ /**
+ * H5Pget_data_transform retrieves the data transform expression previously set in the dataset transfer property
+ * list plist_id by H5Pset_data_transform.
+ *
+ * @param plist_id
+ * IN: Identifier of the property list or class
+ * @param size
+ * IN: Number of bytes of the transform expression to copy to
+ * @param expression
+ * OUT: A data transform expression
+ *
+ * @return The size of the transform expression if successful; 0(zero) if no transform expression exists. Otherwise
+ * returns a negative value.
+ *
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - Size is &lt;= 0.
+ *
+ **/
+ public synchronized static native long H5Pget_data_transform(long plist_id, String[] expression, long size)
+ throws HDF5LibraryException, IllegalArgumentException;
+
+ /**
+ * H5Pset_data_transform sets a data transform expression
+ *
+ * @param plist_id
+ * IN: Identifier of the property list or class
+ * @param expression
+ * IN: Pointer to the null-terminated data transform expression
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - expression is null.
+ *
+ **/
+ public synchronized static native int H5Pset_data_transform(long plist_id, String expression)
+ throws HDF5LibraryException, NullPointerException;
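+
+ /*
+ * Illustrative sketch (not part of the original source): applying a data transform expression to a
+ * dataset transfer property list so that every element is scaled during I/O, then reading the
+ * expression back. The buffer size of 128 is an arbitrary assumption.
+ *
+ * long dxplId = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ * H5.H5Pset_data_transform(dxplId, "x*9/5 + 32");
+ * String[] expr = new String[1];
+ * H5.H5Pget_data_transform(dxplId, expr, 128);
+ * H5.H5Pclose(dxplId);
+ */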
+
+ /**
+ * H5Pget_buffer gets the type conversion and background buffers. Returns the buffer size, in bytes, if successful;
+ * otherwise 0 on failure.
+ *
+ * @param plist
+ * Identifier for the dataset transfer property list.
+ * @param tconv
+ * byte array of application-allocated type conversion buffer.
+ * @param bkg
+ * byte array of application-allocated background buffer.
+ *
+ * @return buffer size, in bytes, if successful; otherwise 0 on failure
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - plist is invalid.
+ **/
+ public synchronized static native int H5Pget_buffer(long plist, byte[] tconv, byte[] bkg)
+ throws HDF5LibraryException, IllegalArgumentException;
+
+ public synchronized static native long H5Pget_buffer_size(long plist)
+ throws HDF5LibraryException, IllegalArgumentException;
+
+ /**
+ * H5Pset_buffer_size sets the maximum size, in bytes, of the type conversion and background buffers.
+ *
+ * Given a dataset transfer property list, H5Pset_buffer_size sets the maximum size for the type conversion buffer and
+ * background buffer. If the buffer size is
+ * smaller than the entire amount of data being transferred between the application and the file, and a type
+ * conversion buffer or background buffer is required, then strip mining will be used.
+ *
+ * Note that there are minimum size requirements for the buffer. Strip mining can only break the data up along the
+ * first dimension, so the buffer must be large enough to accommodate a complete slice that encompasses all of the
+ * remaining dimensions. For example, when strip mining a 100x200x300 hyperslab of a simple data space, the buffer
+ * must be large enough to hold 1x200x300 data elements. When strip mining a 100x200x300x150 hyperslab of a simple
+ * data space, the buffer must be large enough to hold 1x200x300x150 data elements.
+ *
+ * @param plist
+ * Identifier for the dataset transfer property list.
+ * @param size
+ * Size, in bytes, of the type conversion and background buffers.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - plist is invalid.
+ **/
+ public synchronized static native void H5Pset_buffer_size(long plist, long size) throws HDF5LibraryException,
+ IllegalArgumentException;
+
+ public synchronized static native int H5Pget_edc_check(long plist) throws HDF5LibraryException,
+ NullPointerException;
+
+ public synchronized static native int H5Pset_edc_check(long plist, int check) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+ * H5Pget_btree_ratios gets the B-tree split ratios for a dataset transfer property list.
+ *
+ * @param plist_id
+ * IN Dataset transfer property list
+ * @param left
+ * OUT split ratio for leftmost nodes
+ * @param middle
+ * OUT split ratio for all other nodes
+ * @param right
+ * OUT split ratio for rightmost nodes
+ *
+ * @return non-negative if succeed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - an input array is null.
+ **/
+ public synchronized static native int H5Pget_btree_ratios(long plist_id, double[] left, double[] middle,
+ double[] right) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Pset_btree_ratios sets B-tree split ratios for a dataset transfer property list. The split ratios determine what
+ * percent of children go in the first node when a node splits.
+ *
+ * @param plist_id
+ * IN Dataset transfer property list
+ * @param left
+ * IN split ratio for leftmost nodes
+ * @param middle
+ * IN split ratio for all other nodes
+ * @param right
+ * IN split ratio for rightmost nodes
+ *
+ * @return non-negative if succeed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Pset_btree_ratios(long plist_id, double left, double middle, double right)
+ throws HDF5LibraryException;
+
+ public synchronized static native int H5Pget_hyper_vector_size(long dxpl_id, long[] vector_size)
+ throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Pset_hyper_vector_size(long dxpl_id, long vector_size)
+ throws HDF5LibraryException, NullPointerException;
+
+ // Link creation property list (LCPL) routines //
+
+ /**
+ * H5Pget_create_intermediate_group determines whether property is set to enable creating missing intermediate
+ * groups.
+ *
+ * @param lcpl_id
+ * IN: Link creation property list identifier
+ *
+ * @return Boolean true or false
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native boolean H5Pget_create_intermediate_group(long lcpl_id)
+ throws HDF5LibraryException;
+
+ /**
+ * H5Pset_create_intermediate_group specifies in property list whether to create missing intermediate groups
+ *
+ * @param lcpl_id
+ * IN: Link creation property list identifier
+ * @param crt_intermed_group
+ * IN: Flag specifying whether to create intermediate groups upon the creation of an object
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native int H5Pset_create_intermediate_group(long lcpl_id, boolean crt_intermed_group)
+ throws HDF5LibraryException;
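+
+ /*
+ * Illustrative sketch (not part of the original source): using a link creation property list that
+ * creates missing intermediate groups, so that "/A/B/C" can be created in one call even when "/A"
+ * and "/A/B" do not yet exist. fileId is assumed to be an open, writable file.
+ *
+ * long lcplId = H5.H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
+ * H5.H5Pset_create_intermediate_group(lcplId, true);
+ * long groupId = H5.H5Gcreate(fileId, "/A/B/C", lcplId,
+ *         HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ * H5.H5Gclose(groupId);
+ * H5.H5Pclose(lcplId);
+ */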
+
+ // Group creation property list (GCPL) routines //
+
+ /**
+ * H5Pget_local_heap_size_hint Retrieves the anticipated size of the local heap for original-style groups.
+ *
+ * @param gcpl_id
+ * IN: Group creation property list identifier
+ *
+ * @return size_hint, the anticipated size of local heap
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native long H5Pget_local_heap_size_hint(long gcpl_id) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_local_heap_size_hint Specifies the anticipated maximum size of a local heap.
+ *
+ * @param gcpl_id
+ * IN: Group creation property list identifier
+ * @param size_hint
+ * IN: Anticipated maximum size in bytes of local heap
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native int H5Pset_local_heap_size_hint(long gcpl_id, long size_hint)
+ throws HDF5LibraryException;
+
+ /**
+ * H5Pget_link_phase_change Queries the settings for conversion between compact and dense groups.
+ *
+ * @param gcpl_id
+ * IN: Group creation property list identifier
+ * @param links
+ * The maximum number of links for compact storage and the minimum number of links for dense storage, which are used for storing groups
+ *
+ * <pre>
+ * links[0] = The maximum number of links for compact storage
+ * links[1] = The minimum number of links for dense storage
+ * </pre>
+ *
+ * @return Returns a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - size is null.
+ *
+ **/
+ public synchronized static native int H5Pget_link_phase_change(long gcpl_id, int[] links)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Pset_link_phase_change Sets the parameters for conversion between compact and dense groups.
+ *
+ * @param gcpl_id
+ * IN: Group creation property list identifier
+ * @param max_compact
+ * IN: Maximum number of links for compact storage (Default: 8)
+ * @param min_dense
+ * IN: Minimum number of links for dense storage (Default: 6)
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - Invalid values of max_compact and min_dense.
+ *
+ **/
+ public synchronized static native int H5Pset_link_phase_change(long gcpl_id, int max_compact, int min_dense)
+ throws HDF5LibraryException, IllegalArgumentException;
+
+ /**
+ * H5Pget_est_link_info Queries data required to estimate required local heap or object header size.
+ *
+ * @param gcpl_id
+ * IN: Group creation property list identifier
+ * @param link_info
+ * Estimated number of links to be inserted into the group and the estimated average length of link names
+ *
+ * <pre>
+ * link_info[0] = Estimated number of links to be inserted into group
+ * link_info[1] = Estimated average length of link names
+ * </pre>
+ *
+ * @return Returns a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - link_info is null.
+ *
+ **/
+ public synchronized static native int H5Pget_est_link_info(long gcpl_id, int[] link_info)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Pset_est_link_info Sets estimated number of links and length of link names in a group.
+ *
+ * @param gcpl_id
+ * IN: Group creation property list identifier
+ * @param est_num_entries
+ * IN: Estimated number of links to be inserted into group
+ * @param est_name_len
+ * IN: Estimated average length of link names
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - Invalid values to est_num_entries and est_name_len.
+ *
+ **/
+ public synchronized static native int H5Pset_est_link_info(long gcpl_id, int est_num_entries, int est_name_len)
+ throws HDF5LibraryException, IllegalArgumentException;
+
+ /**
+ * H5Pget_link_creation_order queries the group creation property list, gcpl_id, and returns a flag indicating
+ * whether link creation order is tracked and/or indexed in a group.
+ *
+ * @param gcpl_id
+ * IN: Group creation property list identifier
+ *
+ * @return crt_order_flags - Creation order flag(s)
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native int H5Pget_link_creation_order(long gcpl_id) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_link_creation_order Sets flags in a group creation property list, gcpl_id, for tracking and/or indexing
+ * links on creation order.
+ *
+ * @param gcpl_id
+ * IN: Group creation property list identifier
+ * @param crt_order_flags
+ * IN: Creation order flag(s)
+ *
+ *
+ * @return Returns a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native int H5Pset_link_creation_order(long gcpl_id, int crt_order_flags)
+ throws HDF5LibraryException;
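+
+ /*
+ * Illustrative sketch (not part of the original source): asking a group creation property list to
+ * both track and index link creation order so that links can later be iterated in the order they
+ * were added. The flag constants are assumed to be exposed through HDF5Constants.
+ *
+ * long gcplId = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
+ * H5.H5Pset_link_creation_order(gcplId,
+ *         HDF5Constants.H5P_CRT_ORDER_TRACKED | HDF5Constants.H5P_CRT_ORDER_INDEXED);
+ * int flags = H5.H5Pget_link_creation_order(gcplId);
+ * H5.H5Pclose(gcplId);
+ */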
+
+ // String creation property list (STRCPL) routines //
+
+ public synchronized static native int H5Pget_char_encoding(long plist_id) throws HDF5LibraryException;
+
+ public synchronized static native void H5Pset_char_encoding(long plist_id, int encoding)
+ throws HDF5LibraryException;
+
+ // Link access property list (LAPL) routines //
+
+ /**
+ * H5Pget_nlinks retrieves the maximum number of soft or user-defined link traversals allowed, nlinks, before the
+ * library assumes it has found a cycle and aborts the traversal. This value is retrieved from the link access
+ * property list lapl_id.
+ *
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @return the maximum number of links to traverse.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native long H5Pget_nlinks(long lapl_id) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_nlinks sets the maximum number of soft or user-defined link traversals allowed, nlinks, before the library
+ * assumes it has found a cycle and aborts the traversal. This value is set in the link access property list
+ * lapl_id.
+ *
+ * @param lapl_id
+ * IN: Link access property list identifier
+ * @param nlinks
+ * IN: Maximum number of links to traverse
+ *
+ * @return Returns a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - Argument is illegal.
+ *
+ **/
+ public synchronized static native int H5Pset_nlinks(long lapl_id, long nlinks) throws HDF5LibraryException,
+ IllegalArgumentException;
+
+ /**
+ * H5Pget_elink_prefix Retrieves prefix applied to external link paths.
+ *
+ * @param lapl_id
+ * IN: Link access property list identifier
+ * @param prefix
+ * OUT: Prefix applied to external link paths
+ *
+ * @return If successful, returns a non-negative value specifying the size in bytes of the prefix without the NULL
+ * terminator; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - prefix is null.
+ *
+ **/
+ public synchronized static native long H5Pget_elink_prefix(long lapl_id, String[] prefix)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Pset_elink_prefix Sets prefix to be applied to external link paths.
+ *
+ * @param lapl_id
+ * IN: Link access property list identifier
+ * @param prefix
+ * IN: Prefix to be applied to external link paths
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - prefix is null.
+ *
+ **/
+ public synchronized static native int H5Pset_elink_prefix(long lapl_id, String prefix) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+ * H5Pget_elink_fapl Retrieves the file access property list identifier associated with the link access property
+ * list.
+ *
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public static long H5Pget_elink_fapl(long lapl_id) throws HDF5LibraryException {
+ long id = _H5Pget_elink_fapl(lapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Pget_elink_fapl add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Pget_elink_fapl(long lapl_id) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_elink_fapl sets a file access property list for use in accessing a file pointed to by an external link.
+ *
+ * @param lapl_id
+ * IN: Link access property list identifier
+ * @param fapl_id
+ * IN: File access property list identifier
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native int H5Pset_elink_fapl(long lapl_id, long fapl_id) throws HDF5LibraryException;
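+
+ /*
+ * Illustrative sketch (not part of the original source): configuring a link access property list
+ * so that external links are resolved relative to a prefix directory and opened with a specific
+ * file access property list. The prefix path and faplId are assumptions for the example only.
+ *
+ * long laplId = H5.H5Pcreate(HDF5Constants.H5P_LINK_ACCESS);
+ * H5.H5Pset_elink_prefix(laplId, "/data/external/");
+ * H5.H5Pset_elink_fapl(laplId, faplId);
+ * String[] prefix = new String[1];
+ * H5.H5Pget_elink_prefix(laplId, prefix);
+ * H5.H5Pclose(laplId);
+ */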
+
+ /**
+ * H5Pget_elink_acc_flags retrieves the external link traversal file access flag from the specified link access
+ * property list.
+ *
+ * @param lapl_id
+ * IN: Link access property list identifier
+ *
+ * @return File access flag for link traversal.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native int H5Pget_elink_acc_flags(long lapl_id) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_elink_acc_flags Sets the external link traversal file access flag in a link access property list.
+ *
+ * @param lapl_id
+ * IN: Link access property list identifier
+ * @param flags
+ * IN: The access flag for external link traversal.
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception IllegalArgumentException
+ * - Invalid Flag values.
+ *
+ **/
+ public synchronized static native int H5Pset_elink_acc_flags(long lapl_id, int flags) throws HDF5LibraryException,
+ IllegalArgumentException;
+
+ // Object copy property list (OCPYPL) routines //
+
+ /**
+ * H5Pget_copy_object retrieves the properties to be used when an object is copied.
+ *
+ * @param ocp_plist_id
+ * IN: Object copy property list identifier
+ *
+ * @return Copy option(s) set in the object copy property list
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native int H5Pget_copy_object(long ocp_plist_id) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_copy_object Sets properties to be used when an object is copied.
+ *
+ * @param ocp_plist_id
+ * IN: Object copy property list identifier
+ * @param copy_options
+ * IN: Copy option(s) to be set
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native void H5Pset_copy_object(long ocp_plist_id, int copy_options)
+ throws HDF5LibraryException;
+
+ // Other/Older property list routines //
+
+ /**
+ * H5Pget_version retrieves the version information of various objects for a file creation property list.
+ *
+ * @param plist
+ * IN: Identifier of the file creation property list.
+ * @param version_info
+ * OUT: version information.
+ *
+ * <pre>
+ * version_info[0] = boot // boot block version number
+ * version_info[1] = freelist // global freelist version
+ * version_info[2] = stab // symbol table version number
+ * version_info[3] = shhdr // shared object header version
+ * </pre>
+ * @return a non-negative value, with the values of version_info initialized, if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - version_info is null.
+ * @exception IllegalArgumentException
+ * - version_info is illegal.
+ **/
+ public synchronized static native int H5Pget_version(long plist, int[] version_info) throws HDF5LibraryException,
+ NullPointerException, IllegalArgumentException;
+
+ // file drivers property list routines //
+
+ public synchronized static native void H5Pget_fapl_core(long fapl_id, long[] increment, boolean[] backing_store)
+ throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Pset_fapl_core(long fapl_id, long increment, boolean backing_store)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Pget_fapl_direct Retrieve direct I/O settings.
+ *
+ * @param fapl_id
+ * IN: File access property list identifier
+ * @param info
+ * OUT: Returned property list information.
+ *
+ * <pre>
+ * info[0] = alignment // Required memory alignment boundary
+ * info[1] = block_size // File system block size
+ * info[2] = cbuf_size // Copy buffer size
+ * </pre>
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native int H5Pget_fapl_direct(long fapl_id, long[] info) throws HDF5LibraryException;
+
+ /**
+ * H5Pset_fapl_direct Sets up use of the direct I/O driver.
+ *
+ * @param fapl_id
+ * IN: File access property list identifier
+ * @param alignment
+ * IN: Required memory alignment boundary
+ * @param block_size
+ * IN: File system block size
+ * @param cbuf_size
+ * IN: Copy buffer size
+ *
+ * @return a non-negative value if successful; otherwise returns a negative value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ *
+ **/
+ public synchronized static native int H5Pset_fapl_direct(long fapl_id, long alignment, long block_size,
+ long cbuf_size) throws HDF5LibraryException;
+
+ public synchronized static native int H5Pget_fapl_family(long fapl_id, long[] memb_size, long[] memb_fapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Pset_fapl_family(long fapl_id, long memb_size, long memb_fapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Pget_fapl_multi retrieves the settings of the multi I/O driver.
+ *
+ * @param fapl_id
+ * IN: File access property list identifier
+ * @param memb_map
+ * OUT: Maps memory usage types to other memory usage types.
+ * @param memb_fapl
+ * OUT: Property list for each memory usage type.
+ * @param memb_name
+ * OUT: Name generator for names of member files.
+ * @param memb_addr
+ * OUT: The offsets within the virtual address space, from 0 (zero) to HADDR_MAX, at which each type of
+ * data storage begins.
+ *
+ * @return the relax flag: TRUE if read-only access to incomplete file sets is allowed.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - an array is null.
+ *
+ **/
+ public synchronized static native boolean H5Pget_fapl_multi(long fapl_id, int[] memb_map, long[] memb_fapl,
+ String[] memb_name, long[] memb_addr) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Pset_fapl_multi Sets up use of the multi I/O driver.
+ *
+ * @param fapl_id
+ * IN: File access property list identifier
+ * @param memb_map
+ * IN: Maps memory usage types to other memory usage types.
+ * @param memb_fapl
+ * IN: Property list for each memory usage type.
+ * @param memb_name
+ * IN: Name generator for names of member files.
+ * @param memb_addr
+ * IN: The offsets within the virtual address space, from 0 (zero) to HADDR_MAX, at which each type of
+ * data storage begins.
+ * @param relax
+ * IN: Allows read-only access to incomplete file sets when TRUE.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - an array is null.
+ *
+ **/
+ public synchronized static native void H5Pset_fapl_multi(long fapl_id, int[] memb_map, long[] memb_fapl,
+ String[] memb_name, long[] memb_addr, boolean relax) throws HDF5LibraryException, NullPointerException;
+
+
+ /**
+ * H5Pset_fapl_log Sets up the logging virtual file driver (H5FD_LOG) for use. H5Pset_fapl_log modifies the file
+ * access property list to use the logging driver, H5FD_LOG. The logging virtual file driver (VFD) is a clone of the
+ * standard SEC2 (H5FD_SEC2) driver with additional facilities for logging VFD metrics and activity to a file.
+ *
+ * @param fapl_id
+ * IN: File access property list identifier.
+ * @param logfile
+ * IN: logfile is the name of the file in which the logging entries are to be recorded.
+ * @param flags
+ * IN: Flags specifying the types of logging activity.
+ * @param buf_size
+ * IN: The size of the logging buffers, in bytes.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - logfile is null.
+ **/
+ public synchronized static native void H5Pset_fapl_log(long fapl_id, String logfile, long flags, long buf_size)
+ throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Pset_fapl_sec2(long fapl_id) throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native void H5Pset_fapl_split(long fapl_id, String meta_ext, long meta_plist_id,
+ String raw_ext, long raw_plist_id) throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Pset_fapl_stdio(long fapl_id) throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Pset_fapl_windows(long fapl_id) throws HDF5LibraryException, NullPointerException;
+
+ // /////// unimplemented ////////
+
+ // Generic property list routines //
+ // herr_t H5Pencode(hid_t plist_id, void *buf, size_t *nalloc);
+ // hid_t H5Pdecode(const void *buf);
+
+ // Object creation property list (OCPL) routines //
+
+ // File creation property list (FCPL) routines //
+
+ // File access property list (FAPL) routines //
+ // herr_t H5Pset_driver( hid_t plist_id, hid_t new_driver_id, const void *new_driver_info )
+ // void *H5Pget_driver_info( hid_t plist_id )
+ // herr_t H5Pget_multi_type ( hid_t fapl_id, H5FD_mem_t *type )
+ // herr_t H5Pset_multi_type ( hid_t fapl_id, H5FD_mem_t type )
+ // herr_t H5Pget_file_image(hid_t fapl_id, void **buf_ptr_ptr, size_t *buf_len_ptr);
+ // herr_t H5Pset_file_image(hid_t fapl_id, void *buf_ptr, size_t buf_len);
+ // herr_t H5Pget_file_image_callbacks(hid_t fapl_id, H5FD_file_image_callbacks_t *callbacks_ptr);
+ // herr_t H5Pset_file_image_callbacks(hid_t fapl_id, H5FD_file_image_callbacks_t *callbacks_ptr);
+ // herr_t H5Pset_core_write_tracking(hid_t fapl_id, hbool_t is_enabled, size_t page_size);
+ // herr_t H5Pget_core_write_tracking(hid_t fapl_id, hbool_t *is_enabled, size_t *page_size);
+
+ // Dataset creation property list (DCPL) routines //
+
+ // Dataset access property list (DAPL) routines //
+
+ // Dataset xfer property list (DXPL) routines //
+ // herr_t H5Pset_buffer(hid_t plist_id, size_t size, void *tconv, void *bkg);
+ // herr_t H5Pset_preserve(hid_t plist_id, hbool_t status);
+ // int H5Pget_preserve(hid_t plist_id);
+ // herr_t H5Pset_filter_callback(hid_t plist, H5Z_filter_func_t func, void *op_data)
+ // herr_t H5Pget_vlen_mem_manager(hid_t plist, H5MM_allocate_t *alloc, void **alloc_info, H5MM_free_t *free, void
+ // **free_info )
+ // herr_t H5Pset_vlen_mem_manager(hid_t plist, H5MM_allocate_t alloc, void *alloc_info, H5MM_free_t free, void
+ // *free_info )
+ // herr_t H5Pget_type_conv_cb(hid_t plist, H5T_conv_except_func_t *func, void **op_data)
+ // herr_t H5Pset_type_conv_cb( hid_t plist, H5T_conv_except_func_t func, void *op_data)
+
+ // Link creation property list (LCPL) routines //
+
+ // Group creation property list (GCPL) routines //
+
+ // String creation property list (STRCPL) routines //
+
+ // Link access property list (LAPL) routines //
+ // herr_t H5Pget_elink_cb( hid_t lapl_id, H5L_elink_traverse_t *func, void **op_data )
+ // herr_t H5Pset_elink_cb( hid_t lapl_id, H5L_elink_traverse_t func, void *op_data )
+
+ // Object copy property list (OCPYPL) routines //
+ // herr_t H5Padd_merge_committed_dtype_path(hid_t plist_id, const char *path);
+ // herr_t H5Pfree_merge_committed_dtype_paths(hid_t plist_id);
+ // herr_t H5Pget_mcdt_search_cb(hid_t plist_id, H5O_mcdt_search_cb_t *func, void **op_data);
+ // herr_t H5Pset_mcdt_search_cb(hid_t plist_id, H5O_mcdt_search_cb_t func, void *op_data);
+
+
+ // Other/Older property list routines //
+ // herr_t H5Pget_fapl_mpio( int fapl_id, MPI_Comm *comm, MPI_Info *info )
+ // herr_t H5Pset_fapl_mpio( int fapl_id, MPI_Comm comm, MPI_Info info )
+
+ // herr_t H5Pget_fapl_mpiposix( int fapl_id, MPI_Comm *comm, hbool_t *use_gpfs_hints )
+ // herr_t H5Pset_fapl_mpiposix( int fapl_id, MPI_Comm comm, hbool_t use_gpfs_hints )
+
+ // herr_t H5Pget_dxpl_mpio( hid_t dxpl_id, H5FD_mpio_xfer_t *xfer_mode )
+ // herr_t H5Pset_dxpl_mpio( hid_t dxpl_id, H5FD_mpio_xfer_t xfer_mode )
+ // herr_t H5Pset_dxpl_mpio_chunk_opt (hid_t dxpl_id, H5FD_mpio_chunk_opt_t opt_mode)
+ // herr_t H5Pset_dxpl_mpio_chunk_opt_num (hid_t dxpl_id, unsigned num_chunk_per_proc)
+ // herr_t H5Pset_dxpl_mpio_chunk_opt_ratio (hid_t dxpl_id, unsigned percent_proc_per_chunk)
+ // herr_t H5Pset_dxpl_mpio_collective_opt (hid_t dxpl_id, H5FD_mpio_collective_opt_t opt_mode)
+
+ // ////////////////////////////////////////////////////////////
+ // //
+ // H5PL: HDF5 1.8 Plugin API Functions //
+ // //
+ // ////////////////////////////////////////////////////////////
+ /**
+ * H5PLset_loading_state uses one argument to enable or disable individual plugins.
+ * The plugin_flags parameter is an encoded integer in which each bit controls a specific
+ * plugin or class of plugins.
+ * A plugin bit set to 0 (zero) prevents the use of the dynamic plugin corresponding
+ * to that bit position. A plugin bit set to 1 (one) allows the use of that dynamic plugin.
+ * All dynamic plugins can be enabled by setting plugin_flags to a negative value.
+ * A value of 0 (zero) will disable all dynamic plugins.
+ *
+ * H5PLset_loading_state inspects the HDF5_PLUGIN_PRELOAD environment variable every
+ * time it is called. If the environment variable is set to the special :: string,
+ * all dynamic plugins will be disabled.
+ *
+ * @param plugin_flags
+ * IN: The list of dynamic plugin types to enable or disable.
+ * A plugin bit set to 0 (zero) prevents use of that dynamic plugin.
+ * A plugin bit set to 1 (one) enables use of that dynamic plugin.
+ * Setting plugin_flags to a negative value enables all dynamic plugins.
+ * Setting plugin_flags to 0 (zero) disables all dynamic plugins.
+ *
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5PLset_loading_state(int plugin_flags) throws HDF5LibraryException;
+
+ /**
+ * H5PLget_loading_state retrieves the state of the dynamic plugins flag, plugin_flags.
+ *
+ * @return the list of dynamic plugin types that are enabled or disabled.
+ * A plugin bit set to 0 (zero) indicates that the corresponding dynamic plugin is disabled.
+ * A plugin bit set to 1 (one) indicates that the corresponding dynamic plugin is enabled.
+ * If the value of plugin_flags is negative, all dynamic plugins are enabled.
+ * If the value of plugin_flags is 0 (zero), all dynamic plugins are disabled.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5PLget_loading_state() throws HDF5LibraryException;
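+
+ /*
+ * Illustrative sketch (not part of the original source): temporarily disabling all dynamically
+ * loaded filter plugins around a critical section and restoring the previous state afterwards.
+ *
+ * int savedState = H5.H5PLget_loading_state();
+ * H5.H5PLset_loading_state(0);          // 0 disables all dynamic plugins
+ * // ... work that must not trigger plugin loading ...
+ * H5.H5PLset_loading_state(savedState); // restore the saved flags
+ */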
+
+ // ////////////////////////////////////////////////////////////
+ // //
+ // H5R: HDF5 1.8 Reference API Functions //
+ // //
+ // ////////////////////////////////////////////////////////////
+
+ private synchronized static native int H5Rcreate(byte[] ref, long loc_id, String name, int ref_type, long space_id)
+ throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+ /**
+ * H5Rcreate creates the reference, ref, of the type specified in ref_type, pointing to the object name located at
+ * loc_id.
+ *
+ * @param loc_id
+ * IN: Location identifier used to locate the object being pointed to.
+ * @param name
+ * IN: Name of object at location loc_id.
+ * @param ref_type
+ * IN: Type of reference.
+ * @param space_id
+ * IN: Dataspace identifier with selection.
+ *
+ * @return the reference (byte[]) if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - an input array is null.
+ * @exception IllegalArgumentException
+ * - an input array is invalid.
+ **/
+ public synchronized static byte[] H5Rcreate(long loc_id, String name, int ref_type, long space_id)
+ throws HDF5LibraryException, NullPointerException, IllegalArgumentException {
+ /* These sizes are correct for HDF5.1.2 */
+ int ref_size = 8;
+ if (ref_type == HDF5Constants.H5R_DATASET_REGION) {
+ ref_size = 12;
+ }
+ byte rbuf[] = new byte[ref_size];
+
+ /* will raise an exception if fails */
+ H5Rcreate(rbuf, loc_id, name, ref_type, space_id);
+
+ return rbuf;
+ }
+
+ /**
+ * Given a reference to some object, H5Rdereference opens that object and returns an identifier.
+ *
+ * @param dataset
+ * IN: Dataset containing reference object.
+ * @param access_list
+ * IN: Property list of the object being referenced.
+ * @param ref_type
+ * IN: The reference type of ref.
+ * @param ref
+ * IN: reference to an object
+ *
+ * @return valid identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - output array is null.
+ * @exception IllegalArgumentException
+ * - output array is invalid.
+ **/
+ public static long H5Rdereference(long dataset, long access_list, int ref_type, byte[] ref)
+ throws HDF5LibraryException, NullPointerException, IllegalArgumentException {
+ long id = _H5Rdereference(dataset, access_list, ref_type, ref);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Rdereference add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Rdereference(long dataset, long access_list, int ref_type, byte[] ref)
+ throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
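+
+ /*
+ * Illustrative sketch (not part of the original source): creating an object reference to an
+ * existing dataset, storing it as a byte[], and later dereferencing it back into an object
+ * identifier. fileId is assumed to be an open file containing "/dset".
+ *
+ * byte[] ref = H5.H5Rcreate(fileId, "/dset", HDF5Constants.H5R_OBJECT, -1);
+ * long objId = H5.H5Rdereference(fileId, HDF5Constants.H5P_DEFAULT,
+ *         HDF5Constants.H5R_OBJECT, ref);
+ * H5.H5Oclose(objId);
+ */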
+
+ /**
+ * H5Rget_name retrieves a name for the object identified by ref.
+ *
+ * @param loc_id
+ * IN: Identifier for the dataset containing the reference or for the group that dataset is in.
+ * @param ref_type
+ * IN: Type of reference.
+ * @param ref
+ * IN: An object or dataset region reference.
+ * @param name
+ * OUT: A name associated with the referenced object or dataset region.
+ * @param size
+ * IN: The size of the name buffer.
+ *
+ * @return Returns the length of the name if successful, or 0 (zero) if no name is associated with the
+ * identifier. Otherwise returns a negative value.
+ *
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - size is null.
+ * @exception IllegalArgumentException
+ * - Argument is illegal.
+ *
+ **/
+ public synchronized static native long H5Rget_name(long loc_id, int ref_type, byte[] ref, String[] name, long size)
+ throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+ /**
+ * H5Rget_obj_type Given a reference to an object ref, H5Rget_obj_type returns the type of the object pointed to.
+ *
+ * @param loc_id
+ * IN: loc_id of the reference object.
+ * @param ref_type
+ * IN: Type of reference to query.
+ * @param ref
+ * IN: the reference
+ *
+ * @return Returns the object type
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - array is null.
+ * @exception IllegalArgumentException
+ * - array is invalid.
+ **/
+ public synchronized static native int H5Rget_obj_type(long loc_id, int ref_type, byte ref[])
+ throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+ /**
+ * H5Rget_obj_type2 Retrieves the type of object that an object reference points to.
+ *
+ * @see public static int H5Rget_obj_type(long loc_id, int ref_type, byte ref[])
+ **/
+ private synchronized static native int H5Rget_obj_type2(long loc_id, int ref_type, byte ref[], int[] obj_type)
+ throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+ /**
+ * Given a reference to an object ref, H5Rget_region creates a copy of the dataspace of the dataset pointed to and
+ * defines a selection in the copy which is the region pointed to.
+ *
+ * @param loc_id
+ * IN: loc_id of the reference object.
+ * @param ref_type
+ * IN: The reference type of ref.
+ * @param ref
+ * OUT: the reference to the object and region
+ *
+ * @return a valid identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - output array is null.
+ * @exception IllegalArgumentException
+ * - output array is invalid.
+ **/
+ public static long H5Rget_region(long loc_id, int ref_type, byte[] ref) throws HDF5LibraryException,
+ NullPointerException, IllegalArgumentException {
+ long id = _H5Rget_region(loc_id, ref_type, ref);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Rget_region add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Rget_region(long loc_id, int ref_type, byte[] ref)
+ throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+ // ////////////////////////////////////////////////////////////
+ // //
+ // H5S: Dataspace Interface Functions //
+ // //
+ // ////////////////////////////////////////////////////////////
+
+ /**
+ * H5Sclose releases a dataspace.
+ *
+ * @param space_id
+ * Identifier of dataspace to release.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static int H5Sclose(long space_id) throws HDF5LibraryException {
+ if (space_id < 0)
+ return 0; // throw new HDF5LibraryException("Negative ID");;
+
+ log.trace("OPEN_IDS: H5Sclose remove {}", space_id);
+ OPEN_IDS.remove(space_id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ return _H5Sclose(space_id);
+ }
+
+ private synchronized static native int _H5Sclose(long space_id) throws HDF5LibraryException;
+
+ /**
+ * H5Scopy creates a new dataspace which is an exact copy of the dataspace identified by space_id.
+ *
+ * @param space_id
+ * Identifier of dataspace to copy.
+ * @return a dataspace identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Scopy(long space_id) throws HDF5LibraryException {
+ long id = _H5Scopy(space_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Scopy add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Scopy(long space_id) throws HDF5LibraryException;
+
+ /**
+ * H5Screate creates a new dataspace of a particular type.
+ *
+ * @param type
+ * IN: The type of dataspace to be created.
+ *
+ * @return a dataspace identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Screate(int type) throws HDF5LibraryException {
+ long id = _H5Screate(type);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Screate add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Screate(int type) throws HDF5LibraryException;
+
+ /**
+ * H5Screate_simple creates a new simple data space and opens it for access.
+ *
+ * @param rank
+ * IN: Number of dimensions of dataspace.
+ * @param dims
+ * IN: An array of the size of each dimension.
+ * @param maxdims
+ * IN: An array of the maximum size of each dimension.
+ *
+ * @return a dataspace identifier
+ *
+ * @exception HDF5Exception
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - dims or maxdims is null.
+ **/
+ public static long H5Screate_simple(int rank, long[] dims, long[] maxdims) throws HDF5Exception,
+ NullPointerException {
+ long id = _H5Screate_simple(rank, dims, maxdims);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Screate_simple add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Screate_simple(int rank, long[] dims, long[] maxdims)
+ throws HDF5Exception, NullPointerException;
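+
+ /*
+ * Illustrative sketch (not part of the original source): creating a 2-dimensional simple dataspace
+ * whose first dimension is extendible, then querying its rank and dimensions.
+ *
+ * long[] dims = { 100, 200 };
+ * long[] maxdims = { HDF5Constants.H5S_UNLIMITED, 200 };
+ * long spaceId = H5.H5Screate_simple(2, dims, maxdims);
+ * int rank = H5.H5Sget_simple_extent_ndims(spaceId);   // expected: 2
+ * long[] curDims = new long[rank];
+ * long[] curMax = new long[rank];
+ * H5.H5Sget_simple_extent_dims(spaceId, curDims, curMax);
+ * H5.H5Sclose(spaceId);
+ */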
+
+ /**
+ * H5Sdecode reconstructs the HDF5 data space object and returns a new object handle for it.
+ *
+ * @param buf
+ * IN: Buffer for the data space object to be decoded.
+ *
+ * @return a new object handle
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - buf is null.
+ **/
+ public synchronized static native long H5Sdecode(byte[] buf) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Sencode converts a data space description into binary form in a buffer.
+ *
+ * @param obj_id
+ * IN: Identifier of the object to be encoded.
+ *
+ * @return the buffer for the object to be encoded into.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native byte[] H5Sencode(long obj_id) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Sextent_copy copies the extent from source_space_id to dest_space_id. This action may change the type of the
+ * dataspace.
+ *
+ * @param dest_space_id
+ * IN: The identifier for the dataspace to which the extent is copied.
+ * @param source_space_id
+ * IN: The identifier for the dataspace from which the extent is copied.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Sextent_copy(long dest_space_id, long source_space_id)
+ throws HDF5LibraryException;
+
+ /**
+ * H5Sextent_equal determines whether the dataspace extents of two dataspaces, space1_id and space2_id, are equal.
+ *
+ * @param first_space_id
+ * IN: The identifier for the first dataspace.
+ * @param second_space_id
+ * IN: The identifier for the second dataspace.
+ *
+ * @return true if successful, else false
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native boolean H5Sextent_equal(long first_space_id, long second_space_id)
+ throws HDF5LibraryException;
+
+ /**
+ * H5Sget_select_bounds retrieves the coordinates of the bounding box containing the current selection and places
+ * them into user-supplied buffers.
+ * <P>
+ * The start and end buffers must be large enough to hold the dataspace rank number of coordinates.
+ *
+ * @param spaceid
+ * Identifier of the dataspace to query.
+ * @param start
+ * coordinates of lowest corner of bounding box.
+ * @param end
+ * coordinates of highest corner of bounding box.
+ *
+ * @return a non-negative value if successful, with start and end initialized.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - start or end is null.
+ **/
+ public synchronized static native int H5Sget_select_bounds(long spaceid, long[] start, long[] end)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Sget_select_elem_npoints returns the number of element points in the current dataspace selection.
+ *
+ * @param spaceid
+ * Identifier of the dataspace to query.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Sget_select_elem_npoints(long spaceid) throws HDF5LibraryException;
+
+ /**
+ * H5Sget_select_elem_pointlist returns an array of element points in the current dataspace selection. The point
+ * coordinates have the same dimensionality (rank) as the dataspace they are located within, one coordinate per
+ * point.
+ *
+ * @param spaceid
+ * Identifier of the dataspace to query.
+ * @param startpoint
+ * first point to retrieve
+ * @param numpoints
+ * number of points to retrieve
+ * @param buf
+ * returns points startpoint to startpoint+numpoints-1, each point is <i>rank</i> longs.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - buf is null.
+ **/
+ public synchronized static native int H5Sget_select_elem_pointlist(long spaceid, long startpoint, long numpoints,
+ long[] buf) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Sget_select_hyper_blocklist returns an array of hyperslab blocks. The block coordinates have the same
+ * dimensionality (rank) as the dataspace they are located within. The list of blocks is formatted as follows:
+ *
+ * <pre>
+ * &lt;"start" coordinate&gt;, immediately followed by
+ * &lt;"opposite" corner coordinate&gt;, followed by
+ * the next "start" and "opposite" coordinates,
+ * etc.
+ * until all of the selected blocks have been listed.
+ * </pre>
+ *
+ * @param spaceid
+ * Identifier of the dataspace to query.
+ * @param startblock
+ * first block to retrieve
+ * @param numblocks
+ * number of blocks to retrieve
+ * @param buf
+ * returns blocks startblock to startblock+num-1, each block is <i>rank</i> * 2 (corners) longs.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - buf is null.
+ **/
+ public synchronized static native int H5Sget_select_hyper_blocklist(long spaceid, long startblock, long numblocks,
+ long[] buf) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Sget_select_hyper_nblocks returns the number of hyperslab blocks in the current dataspace selection.
+ *
+ * @param spaceid
+ * Identifier of the dataspace to query.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Sget_select_hyper_nblocks(long spaceid) throws HDF5LibraryException;
+
+ /**
+ * H5Sget_select_npoints determines the number of elements in the current selection of a dataspace.
+ *
+ * @param space_id
+ * IN: Identifier of the dataspace object to query
+ *
+ * @return the number of elements in the selection if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Sget_select_npoints(long space_id) throws HDF5LibraryException;
+
+ /**
+ * H5Sget_select_type retrieves the type of selection currently defined for the dataspace space_id.
+ *
+ * @param space_id
+ * IN: Identifier of the dataspace object to query
+ *
+ * @return the dataspace selection type if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Sget_select_type(long space_id) throws HDF5LibraryException;
+
+ /**
+ * H5Sget_simple_extent_dims returns the size and maximum sizes of each dimension of a dataspace through the dims
+ * and maxdims parameters.
+ *
+ * @param space_id
+ * IN: Identifier of the dataspace object to query
+ * @param dims
+ * OUT: Pointer to array to store the size of each dimension.
+ * @param maxdims
+ * OUT: Pointer to array to store the maximum size of each dimension.
+ *
+ * @return the number of dimensions in the dataspace if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - dims or maxdims is null.
+ **/
+ public synchronized static native int H5Sget_simple_extent_dims(long space_id, long[] dims, long[] maxdims)
+ throws HDF5LibraryException, NullPointerException;
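+
+ // Illustrative sketch (not upstream code): querying the current and maximum extent of a
+ // dataspace. The dataspace id is hypothetical.
+ private static long[][] exampleGetExtent(long space_id) throws HDF5LibraryException {
+ int rank = H5Sget_simple_extent_ndims(space_id);
+ long[] dims = new long[rank];
+ long[] maxdims = new long[rank];
+ H5Sget_simple_extent_dims(space_id, dims, maxdims);
+ return new long[][] { dims, maxdims };
+ }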
+
+ /**
+ * H5Sget_simple_extent_ndims determines the dimensionality (or rank) of a dataspace.
+ *
+ * @param space_id
+ * IN: Identifier of the dataspace
+ *
+ * @return the number of dimensions in the dataspace if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Sget_simple_extent_ndims(long space_id) throws HDF5LibraryException;
+
+ /**
+ * H5Sget_simple_extent_npoints determines the number of elements in a dataspace.
+ *
+ * @param space_id
+ * ID of the dataspace object to query
+ * @return the number of elements in the dataspace if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Sget_simple_extent_npoints(long space_id) throws HDF5LibraryException;
+
+ /**
+ * H5Sget_simple_extent_type queries a dataspace to determine the current class of a dataspace.
+ *
+ * @param space_id
+ * Dataspace identifier.
+ *
+ * @return a dataspace class identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Sget_simple_extent_type(long space_id) throws HDF5LibraryException;
+
+ /**
+ * H5Sis_simple determines whether a dataspace is a simple dataspace.
+ *
+ * @param space_id
+ * Identifier of the dataspace to query
+ *
+ * @return true if is a simple dataspace
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native boolean H5Sis_simple(long space_id) throws HDF5LibraryException;
+
+ /**
+ * H5Soffset_simple sets the offset of a simple dataspace space_id.
+ *
+ * @param space_id
+ * IN: The identifier for the dataspace object to reset.
+ * @param offset
+ * IN: The offset at which to position the selection.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - offset array is null.
+ **/
+ public synchronized static native int H5Soffset_simple(long space_id, byte[] offset) throws HDF5LibraryException,
+ NullPointerException;
+
+ public synchronized static int H5Soffset_simple(long space_id, long[] offset) throws HDF5Exception,
+ NullPointerException {
+ if (offset == null) {
+ return -1;
+ }
+
+ HDFArray theArray = new HDFArray(offset);
+ byte[] theArr = theArray.byteify();
+
+ int retVal = H5Soffset_simple(space_id, theArr);
+
+ theArr = null;
+ theArray = null;
+ return retVal;
+ }
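+
+ // Illustrative sketch (not upstream code): shifting a selection by a per-dimension offset with
+ // the long[] convenience overload above. The dataspace id is hypothetical and assumed to be rank 2.
+ private static void exampleShiftSelection(long space_id, long rowShift, long colShift) throws HDF5Exception {
+ long[] offset = { rowShift, colShift };
+ H5Soffset_simple(space_id, offset);
+ }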
+
+ /**
+ * H5Sselect_all selects the entire extent of the dataspace space_id.
+ *
+ * @param space_id
+ * IN: The identifier of the dataspace to be selected.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Sselect_all(long space_id) throws HDF5LibraryException;
+
+ /**
+ * H5Sselect_elements selects array elements to be included in the selection for the space_id dataspace.
+ *
+ * @param space_id
+ * Identifier of the dataspace.
+ * @param op
+ * operator specifying how the new selection is combined.
+ * @param num_elements
+ * Number of elements to be selected.
+ * @param coord
+ * The serialized (byte) form of a 2-dimensional array specifying the coordinates of the elements.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ private synchronized static native int H5Sselect_elements(long space_id, int op, int num_elements, byte[] coord)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Sselect_elements selects array elements to be included in the selection for the space_id dataspace.
+ *
+ * @param space_id
+ * Identifier of the dataspace.
+ * @param op
+ * operator specifying how the new selection is combined.
+ * @param num_elements
+ * Number of elements to be selected.
+ * @param coord2D
+ * A 2-dimensional array specifying the coordinates of the elements.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5Exception
+ * - Error in the data conversion
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - coord2D array is null.
+ **/
+ public synchronized static int H5Sselect_elements(long space_id, int op, int num_elements, long[][] coord2D)
+ throws HDF5Exception, HDF5LibraryException, NullPointerException {
+ if (coord2D == null) {
+ return -1;
+ }
+
+ HDFArray theArray = new HDFArray(coord2D);
+ byte[] coord = theArray.byteify();
+
+ int retVal = H5Sselect_elements(space_id, op, num_elements, coord);
+
+ coord = null;
+ theArray = null;
+ return retVal;
+ }
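+
+ // Illustrative sketch (not upstream code): selecting three individual elements of a rank-2
+ // dataspace through the long[][] convenience overload above. The dataspace id and the coordinates
+ // are hypothetical; H5S_SELECT_SET is assumed to come from HDF5Constants.
+ private static void exampleSelectThreeElements(long space_id) throws HDF5Exception, HDF5LibraryException {
+ long[][] coords = { { 0, 0 }, { 2, 3 }, { 5, 1 } }; // {row, column} per element
+ H5Sselect_elements(space_id, HDF5Constants.H5S_SELECT_SET, coords.length, coords);
+ }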
+
+ /**
+ * H5Sselect_hyperslab selects a hyperslab region to add to the current selected region for the dataspace specified
+ * by space_id. The start, stride, count, and block arrays must be the same size as the rank of the dataspace.
+ *
+ * @param space_id
+ * IN: Identifier of dataspace selection to modify
+ * @param op
+ * IN: Operation to perform on current selection.
+ * @param start
+ * IN: Offset of start of hyperslab
+ * @param stride
+ * IN: Hyperslab stride.
+ * @param count
+ * IN: Number of blocks included in hyperslab.
+ * @param block
+ * IN: Size of block in hyperslab.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - an input array is null.
+ * @exception IllegalArgumentException
+ * - an input array is invalid.
+ **/
+ public synchronized static int H5Sselect_hyperslab(long space_id, int op, byte[] start, byte[] stride,
+ byte[] count, byte[] block) throws HDF5LibraryException, NullPointerException, IllegalArgumentException {
+ // Copy the big-endian 64-bit values out of each byte[]; the LongBuffer views returned by
+ // asLongBuffer() are not backed by an accessible long[] array, so array() would fail here.
+ long[] lastart = new long[start.length / 8];
+ ByteBuffer.wrap(start).asLongBuffer().get(lastart);
+ long[] lastride = new long[stride.length / 8];
+ ByteBuffer.wrap(stride).asLongBuffer().get(lastride);
+ long[] lacount = new long[count.length / 8];
+ ByteBuffer.wrap(count).asLongBuffer().get(lacount);
+ long[] lablock = new long[block.length / 8];
+ ByteBuffer.wrap(block).asLongBuffer().get(lablock);
+
+ return H5Sselect_hyperslab(space_id, op, lastart, lastride, lacount, lablock);
+ }
+
+ public synchronized static native int H5Sselect_hyperslab(long space_id, int op, long[] start, long[] stride,
+ long[] count, long[] block) throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
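+
+ // Illustrative sketch (not upstream code): selecting a contiguous 2 x 4 region starting at (1, 2)
+ // of a rank-2 dataspace. All identifiers are hypothetical; H5S_SELECT_SET is assumed to come from
+ // HDF5Constants.
+ private static void exampleSelectRegion(long space_id) throws HDF5LibraryException {
+ long[] start = { 1, 2 };
+ long[] stride = { 1, 1 }; // contiguous
+ long[] count = { 2, 4 }; // one element per block, so count is the region size
+ long[] block = { 1, 1 };
+ H5Sselect_hyperslab(space_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+ }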
+
+ /**
+ * H5Sselect_none resets the selection region for the dataspace space_id to include no elements.
+ *
+ * @param space_id
+ * IN: The identifier of the dataspace to be reset.
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Sselect_none(long space_id) throws HDF5LibraryException;
+
+ /**
+ * H5Sselect_valid verifies that the selection for the dataspace is contained within the dataspace extent.
+ *
+ * @param space_id
+ * The identifier of the dataspace whose selection is being verified.
+ *
+ * @return true if the selection is contained within the extent; false if it is not or if an error occurred.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native boolean H5Sselect_valid(long space_id) throws HDF5LibraryException;
+
+ /**
+ * H5Sset_extent_none removes the extent from a dataspace and sets the type to H5S_NONE.
+ *
+ * @param space_id
+ * The identifier for the dataspace from which the extent is to be removed.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Sset_extent_none(long space_id) throws HDF5LibraryException;
+
+ /**
+ * H5Sset_extent_simple sets or resets the size of an existing dataspace.
+ *
+ * @param space_id
+ * Dataspace identifier.
+ * @param rank
+ * Rank, or dimensionality, of the dataspace.
+ * @param current_size
+ * Array containing current size of dataspace.
+ * @param maximum_size
+ * Array containing maximum size of dataspace.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Sset_extent_simple(long space_id, int rank, long[] current_size,
+ long[] maximum_size) throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static long H5Sset_extent_simple(long space_id, int rank, byte[] current_size,
+ byte[] maximum_size) throws HDF5LibraryException, NullPointerException {
+ // Copy the big-endian 64-bit values out of each byte[]; the LongBuffer views returned by
+ // asLongBuffer() are not backed by an accessible long[] array, so array() would fail here.
+ long[] lacs = new long[current_size.length / 8];
+ ByteBuffer.wrap(current_size).asLongBuffer().get(lacs);
+ long[] lamaxs = new long[maximum_size.length / 8];
+ ByteBuffer.wrap(maximum_size).asLongBuffer().get(lamaxs);
+
+ return H5Sset_extent_simple(space_id, rank, lacs, lamaxs);
+ }
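+
+ // Illustrative sketch (not upstream code): growing a rank-2 dataspace to 20 x 10 while leaving the
+ // first dimension unlimited. The dataspace id is hypothetical; H5S_UNLIMITED is assumed to come
+ // from HDF5Constants.
+ private static void exampleGrowExtent(long space_id) throws HDF5LibraryException {
+ long[] newSize = { 20, 10 };
+ long[] maxSize = { HDF5Constants.H5S_UNLIMITED, 10 };
+ H5Sset_extent_simple(space_id, newSize.length, newSize, maxSize);
+ }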
+
+ /**
+ * H5Sget_regular_hyperslab retrieves the start, stride, count and block values of a regular hyperslab
+ * selection for the dataspace specified by space_id. The start, stride, count, and block arrays must be the same
+ * size as the rank of the dataspace.
+ *
+ * @param space_id
+ * IN: Identifier of dataspace selection to query
+ * @param start
+ * OUT: Offset of start of hyperslab
+ * @param stride
+ * OUT: Hyperslab stride.
+ * @param count
+ * OUT: Number of blocks included in hyperslab.
+ * @param block
+ * OUT: Size of block in hyperslab.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - an output array is null.
+ * @exception IllegalArgumentException
+ * - an output array is invalid.
+ **/
+ public synchronized static native void H5Sget_regular_hyperslab(long space_id, long[] start, long[] stride,
+ long[] count, long[] block) throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
+
+ /**
+ * H5Sis_regular_hyperslab determines whether the hyperslab selection of the dataspace specified
+ * by space_id is regular.
+ *
+ * @param space_id
+ * IN: Identifier of dataspace selection to query
+ *
+ * @return a TRUE/FALSE for hyperslab selection if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native boolean H5Sis_regular_hyperslab(long space_id) throws HDF5LibraryException;
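+
+ // Illustrative sketch (not upstream code): reading back the parameters of a hyperslab selection
+ // only when the library reports it as regular. The dataspace id is hypothetical.
+ private static void examplePrintRegularHyperslab(long space_id) throws HDF5LibraryException {
+ if (H5Sis_regular_hyperslab(space_id)) {
+ int rank = H5Sget_simple_extent_ndims(space_id);
+ long[] start = new long[rank], stride = new long[rank], count = new long[rank], block = new long[rank];
+ H5Sget_regular_hyperslab(space_id, start, stride, count, block);
+ System.out.println("start[0]=" + start[0] + ", count[0]=" + count[0]);
+ }
+ }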
+
+ // /////// unimplemented ////////
+
+
+
+ // ////////////////////////////////////////////////////////////
+ // //
+ // H5T: Datatype Interface Functions //
+ // //
+ // ////////////////////////////////////////////////////////////
+
+ /**
+ * H5Tarray_create creates a new array datatype object.
+ *
+ * @param base_id
+ * IN: Datatype identifier for the array base datatype.
+ * @param ndims
+ * IN: Rank of the array.
+ * @param dim
+ * IN: Size of each array dimension.
+ *
+ * @return a valid datatype identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - dim is null.
+ **/
+ public static long H5Tarray_create(long base_id, int ndims, long[] dim) throws HDF5LibraryException,
+ NullPointerException {
+ long id = _H5Tarray_create2(base_id, ndims, dim);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Tarray_create add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Tarray_create2(long base_id, int ndims, long[] dim)
+ throws HDF5LibraryException, NullPointerException;
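+
+ // Illustrative sketch (not upstream code): building a 3 x 4 array datatype over a native int base
+ // type. H5T_NATIVE_INT is assumed to come from HDF5Constants.
+ private static long exampleArrayType() throws HDF5LibraryException {
+ long[] dims = { 3, 4 };
+ // The returned identifier must eventually be released with H5Tclose.
+ return H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, dims.length, dims);
+ }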
+
+ /**
+ * H5Tclose releases a datatype.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to release.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static int H5Tclose(long type_id) throws HDF5LibraryException {
+ if (type_id < 0)
+ return 0; // throw new HDF5LibraryException("Negative ID");;
+
+ log.trace("OPEN_IDS: H5Tclose remove {}", type_id);
+ OPEN_IDS.remove(type_id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ return _H5Tclose(type_id);
+ }
+
+ private synchronized static native int _H5Tclose(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tcommit saves a transient datatype as an immutable named datatype in a file.
+ *
+ * @param loc_id
+ * IN: Location identifier.
+ * @param name
+ * IN: Name given to committed datatype.
+ * @param type_id
+ * IN: Identifier of datatype to be committed.
+ * @param lcpl_id
+ * IN: Link creation property list.
+ * @param tcpl_id
+ * IN: Datatype creation property list.
+ * @param tapl_id
+ * IN: Datatype access property list.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native void H5Tcommit(long loc_id, String name, long type_id, long lcpl_id,
+ long tcpl_id, long tapl_id) throws HDF5LibraryException, NullPointerException;
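+
+ // Illustrative sketch (not upstream code): committing a copy of a native double as a named
+ // datatype at the file root. The file id is hypothetical; H5T_NATIVE_DOUBLE and H5P_DEFAULT are
+ // assumed to come from HDF5Constants.
+ private static void exampleCommitNamedType(long file_id) throws HDF5LibraryException {
+ long typeId = H5Tcopy(HDF5Constants.H5T_NATIVE_DOUBLE);
+ try {
+ H5Tcommit(file_id, "/my_double", typeId, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ finally {
+ H5Tclose(typeId);
+ }
+ }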
+
+ /**
+ * H5Tcommit_anon commits a transient datatype (not immutable) to a file, turning it into a named datatype with the
+ * specified datatype creation and access property lists.
+ *
+ * @param loc_id
+ * IN: Location identifier.
+ * @param type_id
+ * IN: Identifier of datatype to be committed.
+ * @param tcpl_id
+ * IN: Datatype creation property list.
+ * @param tapl_id
+ * IN: Datatype access property list.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Tcommit_anon(long loc_id, long type_id, long tcpl_id, long tapl_id)
+ throws HDF5LibraryException;
+
+ /**
+ * H5Tcommitted queries a type to determine whether the type specified by the type identifier is a named type or a
+ * transient type.
+ *
+ * @param type_id
+ * IN: Identifier of datatype.
+ *
+ * @return true if the datatype has been committed
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native boolean H5Tcommitted(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tcompiler_conv finds out whether the library's conversion function from type src_id to type dst_id is a
+ * compiler (hard) conversion.
+ *
+ * @param src_id
+ * IN: Identifier of source datatype.
+ * @param dst_id
+ * IN: Identifier of destination datatype.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Tcompiler_conv(long src_id, long dst_id) throws HDF5LibraryException;
+
+ /**
+ ** H5Tconvert converts nelmts elements from the type specified by the src_id identifier to type dst_id.
+ *
+ * @param src_id
+ * IN: Identifier of source datatype.
+ * @param dst_id
+ * IN: Identifier of destination datatype.
+ * @param nelmts
+ * IN: Size of array buf.
+ * @param buf
+ * IN: Array containing pre- and post-conversion values.
+ * @param background
+ * IN: Optional background buffer.
+ * @param plist_id
+ * IN: Dataset transfer property list identifier.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - buf is null.
+ **/
+ public synchronized static native void H5Tconvert(long src_id, long dst_id, long nelmts, byte[] buf,
+ byte[] background, long plist_id) throws HDF5LibraryException, NullPointerException;
+
+ // int H5Tconvert(int src_id, int dst_id, long nelmts, Pointer buf, Pointer background, int plist_id);
+
+ /**
+ * H5Tcopy copies an existing datatype. The returned type is always transient and unlocked.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to copy. Can be a datatype identifier, a predefined datatype (defined in
+ * H5Tpublic.h), or a dataset Identifier.
+ *
+ * @return a datatype identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Tcopy(long type_id) throws HDF5LibraryException {
+ long id = _H5Tcopy(type_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Tcopy add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Tcopy(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tcreate creates a new datatype of the specified class with the specified number of bytes.
+ *
+ * @param tclass
+ * IN: Class of datatype to create.
+ * @param size
+ * IN: The number of bytes in the datatype to create.
+ *
+ * @return datatype identifier
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Tcreate(int tclass, long size) throws HDF5LibraryException {
+ long id = _H5Tcreate(tclass, size);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Tcreate add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Tcreate(int type, long size) throws HDF5LibraryException;
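+
+ // Illustrative sketch (not upstream code): creating a compound datatype with an int and a double
+ // member via H5Tcreate and H5Tinsert (declared further below). The member layout mimics a packed
+ // C struct; the H5T_* constants are assumed to come from HDF5Constants.
+ private static long exampleCompoundType() throws HDF5LibraryException {
+ long intSize = H5Tget_size(HDF5Constants.H5T_NATIVE_INT);
+ long dblSize = H5Tget_size(HDF5Constants.H5T_NATIVE_DOUBLE);
+ long compound = H5Tcreate(HDF5Constants.H5T_COMPOUND, intSize + dblSize);
+ H5Tinsert(compound, "id", 0, HDF5Constants.H5T_NATIVE_INT);
+ H5Tinsert(compound, "value", intSize, HDF5Constants.H5T_NATIVE_DOUBLE);
+ return compound; // caller is expected to H5Tclose the returned id
+ }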
+
+ /**
+ * H5Tdecode reconstructs the HDF5 data type object and returns a new object handle for it.
+ *
+ * @param buf
+ * IN: Buffer for the data type object to be decoded.
+ *
+ * @return a new object handle
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - buf is null.
+ **/
+ public static long H5Tdecode(byte[] buf) throws HDF5LibraryException, NullPointerException {
+ long id = _H5Tdecode(buf);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Tdecode add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Tdecode(byte[] buf) throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Tdetect_class determines whether the datatype specified in dtype_id contains any datatypes of the datatype
+ * class specified in dtype_class.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ * @param cls
+ * IN: Identifier of datatype cls.
+ *
+ * @return true if the datatype specified in dtype_id contains any datatypes of the datatype class
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native boolean H5Tdetect_class(long type_id, int cls) throws HDF5LibraryException;
+
+ /**
+ * H5Tencode converts a data type description into binary form in a buffer.
+ *
+ * @param obj_id
+ * IN: Identifier of the object to be encoded.
+ * @param buf
+ * OUT: Buffer for the object to be encoded into. If the provided buffer is NULL, only the size of buffer
+ * needed is returned.
+ * @param nalloc
+ * IN: The size of the allocated buffer.
+ *
+ * @return the size needed for the allocated buffer.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - buf is null.
+ **/
+ public synchronized static native int H5Tencode(long obj_id, byte[] buf, long nalloc) throws HDF5LibraryException,
+ NullPointerException;
+
+ // /**
+ // * H5Tencode converts a data type description into binary form in a buffer.
+ // *
+ // * @param obj_id IN: Identifier of the object to be encoded.
+ // *
+ // * @return the buffer for the object to be encoded into.
+ // *
+ // * @exception HDF5LibraryException - Error from the HDF-5 Library.
+ // **/
+ // public synchronized static native byte[] H5Tencode(int obj_id)
+ // throws HDF5LibraryException;
+
+ /**
+ * H5Tenum_create creates a new enumeration datatype based on the specified base datatype, parent_id, which must be
+ * an integer type.
+ *
+ * @param base_id
+ * IN: Identifier of the parent datatype; must be an integer type.
+ *
+ * @return the datatype identifier for the new enumeration datatype
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Tenum_create(long base_id) throws HDF5LibraryException {
+ long id = _H5Tenum_create(base_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Tenum_create add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Tenum_create(long base_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tenum_insert inserts a new enumeration datatype member into an enumeration datatype.
+ *
+ * @param type
+ * IN: Identifier of datatype.
+ * @param name
+ * IN: The name of the member
+ * @param value
+ * IN: The value of the member, data of the correct type
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native void H5Tenum_insert(long type, String name, byte[] value)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Tenum_insert inserts a new enumeration datatype member into an enumeration datatype.
+ *
+ * @param type
+ * IN: Identifier of datatype.
+ * @param name
+ * IN: The name of the member
+ * @param value
+ * IN: The value of the member, data of the correct type
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public static int H5Tenum_insert(long type, String name, int[] value) throws HDF5LibraryException,
+ NullPointerException {
+ return H5Tenum_insert_int(type, name, value);
+ }
+
+ public static int H5Tenum_insert(long type, String name, int value) throws HDF5LibraryException,
+ NullPointerException {
+ int[] val = { value };
+ return H5Tenum_insert_int(type, name, val);
+ }
+
+ private synchronized static native int H5Tenum_insert_int(long type, String name, int[] value)
+ throws HDF5LibraryException, NullPointerException;
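+
+ // Illustrative sketch (not upstream code): building a small enumeration type with the int
+ // convenience overloads above. H5T_NATIVE_INT is assumed to come from HDF5Constants.
+ private static long exampleTrafficLightEnum() throws HDF5LibraryException {
+ long enumType = H5Tenum_create(HDF5Constants.H5T_NATIVE_INT);
+ H5Tenum_insert(enumType, "RED", 0);
+ H5Tenum_insert(enumType, "AMBER", 1);
+ H5Tenum_insert(enumType, "GREEN", 2);
+ return enumType; // caller is expected to H5Tclose the returned id
+ }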
+
+ /**
+ * H5Tenum_nameof finds the symbol name that corresponds to the specified value of the enumeration datatype type.
+ *
+ * @param type
+ * IN: Identifier of datatype.
+ * @param value
+ * IN: The value of the member, data of the correct type
+ * @param size
+ * IN: The probable length of the name
+ *
+ * @return the symbol name.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - value is null.
+ **/
+ public synchronized static native String H5Tenum_nameof(long type, byte[] value, long size)
+ throws HDF5LibraryException, NullPointerException;
+
+ // int H5Tenum_nameof(int type, Pointer value, Buffer name/* out */, long size);
+
+ /**
+ * H5Tenum_nameof finds the symbol name that corresponds to the specified value of the enumeration datatype type.
+ *
+ * @param type
+ * IN: Identifier of datatype.
+ * @param value
+ * IN: The value of the member, data of the correct type
+ * @param name
+ * OUT: The name of the member
+ * @param size
+ * IN: The max length of the name
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public static int H5Tenum_nameof(long type, int[] value, String[] name, int size) throws HDF5LibraryException,
+ NullPointerException {
+ return H5Tenum_nameof_int(type, value, name, size);
+ }
+
+ private synchronized static native int H5Tenum_nameof_int(long type, int[] value, String[] name, int size)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Tenum_valueof finds the value that corresponds to the specified name of the enumeration datatype type.
+ *
+ * @param type
+ * IN: Identifier of datatype.
+ * @param name
+ * IN: The name of the member
+ * @param value
+ * OUT: The value of the member
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Tenum_valueof(long type, String name, byte[] value)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Tenum_valueof finds the value that corresponds to the specified name of the enumeration datatype type.
+ *
+ * @param type
+ * IN: Identifier of datatype.
+ * @param name
+ * IN: The name of the member
+ * @param value
+ * OUT: The value of the member
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public static int H5Tenum_valueof(long type, String name, int[] value) throws HDF5LibraryException,
+ NullPointerException {
+ return H5Tenum_valueof_int(type, name, value);
+ }
+
+ private synchronized static native int H5Tenum_valueof_int(long type, String name, int[] value)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Tequal determines whether two datatype identifiers refer to the same datatype.
+ *
+ * @param type_id1
+ * IN: Identifier of datatype to compare.
+ * @param type_id2
+ * IN: Identifier of datatype to compare.
+ *
+ * @return true if the datatype identifiers refer to the same datatype, else false.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native boolean H5Tequal(long type_id1, long type_id2) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_array_dims returns the sizes of the dimensions of the specified array datatype object.
+ *
+ * @param type_id
+ * IN: Datatype identifier of array object.
+ * @param dims
+ * OUT: Sizes of array dimensions.
+ *
+ * @return the non-negative number of dimensions of the array type
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - dims is null.
+ **/
+ public static int H5Tget_array_dims(long type_id, long[] dims) throws HDF5LibraryException, NullPointerException {
+ return H5Tget_array_dims2(type_id, dims);
+ }
+
+ /**
+ * H5Tget_array_dims2 returns the sizes of the dimensions of the specified array datatype object.
+ *
+ * @param type_id
+ * IN: Datatype identifier of array object.
+ * @param dims
+ * OUT: Sizes of array dimensions.
+ *
+ * @return the non-negative number of dimensions of the array type
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - dims is null.
+ **/
+ public synchronized static native int H5Tget_array_dims2(long type_id, long[] dims) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+ * H5Tget_array_ndims returns the rank, the number of dimensions, of an array datatype object.
+ *
+ * @param type_id
+ * IN: Datatype identifier of array object.
+ *
+ * @return the rank of the array
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tget_array_ndims(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_class returns the datatype class identifier.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ *
+ * @return datatype class identifier if successful; otherwise H5T_NO_CLASS(-1).
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tget_class(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_class_name returns the name of the datatype class identified by class_id.
+ *
+ * @param class_id
+ * IN: Identifier of class from H5Tget_class.
+ *
+ * @return the datatype class name if recognized; otherwise "H5T_NO_CLASS".
+ *
+ **/
+ public static String H5Tget_class_name(long class_id) {
+ String retValue = null;
+ if (HDF5Constants.H5T_INTEGER == class_id) /* integer types */
+ retValue = "H5T_INTEGER";
+ else if (HDF5Constants.H5T_FLOAT == class_id) /* floating-point types */
+ retValue = "H5T_FLOAT";
+ else if (HDF5Constants.H5T_TIME == class_id) /* date and time types */
+ retValue = "H5T_TIME";
+ else if (HDF5Constants.H5T_STRING == class_id) /* character string types */
+ retValue = "H5T_STRING";
+ else if (HDF5Constants.H5T_BITFIELD == class_id) /* bit field types */
+ retValue = "H5T_BITFIELD";
+ else if (HDF5Constants.H5T_OPAQUE == class_id) /* opaque types */
+ retValue = "H5T_OPAQUE";
+ else if (HDF5Constants.H5T_COMPOUND == class_id) /* compound types */
+ retValue = "H5T_COMPOUND";
+ else if (HDF5Constants.H5T_REFERENCE == class_id)/* reference types */
+ retValue = "H5T_REFERENCE";
+ else if (HDF5Constants.H5T_ENUM == class_id) /* enumeration types */
+ retValue = "H5T_ENUM";
+ else if (HDF5Constants.H5T_VLEN == class_id) /* Variable-Length types */
+ retValue = "H5T_VLEN";
+ else if (HDF5Constants.H5T_ARRAY == class_id) /* Array types */
+ retValue = "H5T_ARRAY";
+ else
+ retValue = "H5T_NO_CLASS";
+
+ return retValue;
+ }
+
+ /**
+ * H5Tget_create_plist returns a property list identifier for the datatype creation property list associated with
+ * the datatype specified by type_id.
+ *
+ * @param type_id
+ * IN: Identifier of datatype.
+ *
+ * @return a datatype property list identifier.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Tget_create_plist(long type_id) throws HDF5LibraryException {
+ long id = _H5Tget_create_plist(type_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: _H5Tget_create_plist add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Tget_create_plist(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_cset retrieves the character set type of a string datatype.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ *
+ * @return a valid character set type if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tget_cset(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tset_cset sets the character set to be used.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to modify.
+ * @param cset
+ * IN: Character set type.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tset_cset(long type_id, int cset) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_ebias retrieves the exponent bias of a floating-point type.
+ *
+ * @param type_id
+ * Identifier of datatype to query.
+ *
+ * @return the bias if successful; otherwise 0.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tget_ebias(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tset_ebias sets the exponent bias of a floating-point type.
+ *
+ * @param type_id
+ * Identifier of datatype to set.
+ * @param ebias
+ * Exponent bias value.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static int H5Tset_ebias(long type_id, int ebias) throws HDF5LibraryException {
+ H5Tset_ebias(type_id, (long) ebias);
+ return 0;
+ }
+
+ /**
+ * H5Tget_ebias retrieves the exponent bias of a floating-point type.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ *
+ * @return the bias
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Tget_ebias_long(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tset_ebias sets the exponent bias of a floating-point type.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to set.
+ * @param ebias
+ * IN: Exponent bias value.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Tset_ebias(long type_id, long ebias) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_fields retrieves information about the locations of the various bit fields of a floating point datatype.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ * @param fields
+ * OUT: location of size and bit-position.
+ * <ul>
+ * <li>fields[0] = spos OUT: location to return floating-point sign bit-position.</li>
+ * <li>fields[1] = epos OUT: location to return exponent bit-position.</li>
+ * <li>fields[2] = esize OUT: location to return size of exponent in bits.</li>
+ * <li>fields[3] = mpos OUT: location to return mantissa bit-position.</li>
+ * <li>fields[4] = msize OUT: location to return size of mantissa in bits.</li>
+ * </ul>
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - fields is null.
+ * @exception IllegalArgumentException
+ * - fields array is invalid.
+ **/
+ public synchronized static native void H5Tget_fields(long type_id, long[] fields) throws HDF5LibraryException,
+ NullPointerException, IllegalArgumentException;
+
+ /**
+ * H5Tget_fields retrieves information about the locations of the various bit fields of a floating point datatype.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ * @param fields
+ * OUT: location of size and bit-position.
+ *
+ * <pre>
+ * fields[0] = spos OUT: location to return floating-point sign bit-position.
+ * fields[1] = epos OUT: location to return exponent bit-position.
+ * fields[2] = esize OUT: location to return size of exponent in bits.
+ * fields[3] = mpos OUT: location to return mantissa bit-position.
+ * fields[4] = msize OUT: location to return size of mantissa in bits.
+ * </pre>
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - fields is null.
+ * @exception IllegalArgumentException
+ * - fields array is invalid.
+ **/
+ public static int H5Tget_fields(long type_id, int[] fields) throws HDF5LibraryException, NullPointerException,
+ IllegalArgumentException {
+ return H5Tget_fields_int(type_id, fields);
+ }
+
+ private synchronized static native int H5Tget_fields_int(long type_id, int[] fields) throws HDF5LibraryException,
+ NullPointerException, IllegalArgumentException;
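+
+ // Illustrative sketch (not upstream code): reading the bit-field layout of a native float. A copy
+ // is queried so the predefined type itself is never touched; H5T_NATIVE_FLOAT is assumed to come
+ // from HDF5Constants.
+ private static long[] exampleFloatFieldLayout() throws HDF5LibraryException {
+ long floatType = H5Tcopy(HDF5Constants.H5T_NATIVE_FLOAT);
+ try {
+ long[] fields = new long[5]; // {spos, epos, esize, mpos, msize}
+ H5Tget_fields(floatType, fields);
+ return fields;
+ }
+ finally {
+ H5Tclose(floatType);
+ }
+ }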
+
+ /**
+ * H5Tset_fields sets the locations and sizes of the various floating point bit fields.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to set.
+ * @param spos
+ * IN: Sign bit position.
+ * @param epos
+ * IN: Exponent bit position.
+ * @param esize
+ * IN: Size of exponent in bits.
+ * @param mpos
+ * IN: Mantissa bit position.
+ * @param msize
+ * IN: Size of mantissa in bits.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Tset_fields(long type_id, long spos, long epos, long esize, long mpos,
+ long msize) throws HDF5LibraryException;
+
+ /**
+ * H5Tset_fields sets the locations and sizes of the various floating point bit fields.
+ *
+ * @param type_id
+ * Identifier of datatype to set.
+ * @param spos
+ * Sign bit position.
+ * @param epos
+ * Exponent bit position.
+ * @param esize
+ * Size of exponent in bits.
+ * @param mpos
+ * Mantissa bit position.
+ * @param msize
+ * Size of mantissa in bits.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static int H5Tset_fields(long type_id, int spos, int epos, int esize, int mpos, int msize)
+ throws HDF5LibraryException {
+ H5Tset_fields(type_id, (long) spos, (long) epos, (long) esize, (long) mpos, (long) msize);
+ return 0;
+ }
+
+ /**
+ * H5Tget_inpad retrieves the internal padding type for unused bits in floating-point datatypes.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ *
+ * @return a valid padding type if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tget_inpad(long type_id) throws HDF5LibraryException;
+
+ /**
+ * If any internal bits of a floating point type are unused (that is, those significant bits which are not part of
+ * the sign, exponent, or mantissa), then H5Tset_inpad specifies how those unused bits are filled, according to the
+ * padding type inpad.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to modify.
+ * @param inpad
+ * IN: Padding type.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tset_inpad(long type_id, int inpad) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_member_class returns the class of datatype of the specified member.
+ *
+ * @param type_id
+ * IN: Datatype identifier of compound object.
+ * @param membno
+ * IN: Compound object member number.
+ *
+ * @return the class of the datatype of the field if successful;
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tget_member_class(long type_id, int membno) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_member_index retrieves the index of a field of a compound datatype.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ * @param field_name
+ * IN: Field name of the field index to retrieve.
+ *
+ * @return if field is defined, the index; else negative.
+ **/
+ public synchronized static native int H5Tget_member_index(long type_id, String field_name);
+
+ /**
+ * H5Tget_member_name retrieves the name of a field of a compound datatype or an element of an enumeration datatype.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ * @param field_idx
+ * IN: Field index (0-based) of the field name to retrieve.
+ *
+ * @return a valid pointer to the name if successful; otherwise null.
+ **/
+ public synchronized static native String H5Tget_member_name(long type_id, int field_idx);
+
+ /**
+ * H5Tget_member_offset returns the byte offset of the specified member of the compound datatype. This is the byte
+ * offset in the HDF-5 file/library, NOT the offset of any Java object which might be mapped to this data item.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ * @param membno
+ * IN: Field index (0-based) of the field type to retrieve.
+ *
+ * @return the offset of the member.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Tget_member_offset(long type_id, int membno) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_member_type returns the datatype of the specified member.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ * @param field_idx
+ * IN: Field index (0-based) of the field type to retrieve.
+ *
+ * @return the identifier of a copy of the datatype of the field if successful;
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Tget_member_type(long type_id, int field_idx) throws HDF5LibraryException {
+ long id = _H5Tget_member_type(type_id, field_idx);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Tget_member_type add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Tget_member_type(long type_id, int field_idx)
+ throws HDF5LibraryException;
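+
+ // Illustrative sketch (not upstream code): walking the members of a compound datatype with the
+ // queries documented above. The datatype id is hypothetical.
+ private static void examplePrintCompoundMembers(long type_id) throws HDF5LibraryException {
+ int nmembers = H5Tget_nmembers(type_id);
+ for (int i = 0; i < nmembers; i++) {
+ String name = H5Tget_member_name(type_id, i);
+ long offset = H5Tget_member_offset(type_id, i);
+ long memberType = H5Tget_member_type(type_id, i);
+ try {
+ System.out.println(name + " @ byte " + offset + ", size " + H5Tget_size(memberType));
+ }
+ finally {
+ H5Tclose(memberType);
+ }
+ }
+ }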
+
+ /**
+ * H5Tget_member_value returns the value of the enumeration datatype member memb_no.
+ *
+ * @param type_id
+ * IN: Datatype identifier for the enumeration datatype.
+ * @param membno
+ * IN: Number of the enumeration datatype member.
+ * @param value
+ * OUT: The value of the member
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - value is null.
+ **/
+ public synchronized static native void H5Tget_member_value(long type_id, int membno, byte[] value)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Tget_member_value returns the value of the enumeration datatype member memb_no.
+ *
+ * @param type_id
+ * IN: Identifier of datatype.
+ * @param membno
+ * IN: Number of the enumeration datatype member
+ * @param value
+ * OUT: The value of the member
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - value is null.
+ **/
+ public static int H5Tget_member_value(long type_id, int membno, int[] value) throws HDF5LibraryException,
+ NullPointerException {
+ return H5Tget_member_value_int(type_id, membno, value);
+ }
+
+ private synchronized static native int H5Tget_member_value_int(long type_id, int membno, int[] value)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Tget_native_type returns the equivalent native datatype for the datatype specified in type_id.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query. Direction of search is assumed to be in ascending order.
+ *
+ * @return the native datatype identifier for the specified dataset datatype.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static long H5Tget_native_type(long type_id) throws HDF5LibraryException {
+ return H5Tget_native_type(type_id, HDF5Constants.H5T_DIR_ASCEND);
+ }
+
+ /**
+ * H5Tget_native_type returns the equivalent native datatype for the datatype specified in type_id.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ * @param direction
+ * IN: Direction of search.
+ *
+ * @return the native datatype identifier for the specified dataset datatype.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Tget_native_type(long type_id, int direction) throws HDF5LibraryException {
+ long id = _H5Tget_native_type(type_id, direction);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Tget_native_type add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Tget_native_type(long tid, int direction) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_nmembers retrieves the number of fields a compound datatype has.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ *
+ * @return the number of members the datatype has if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tget_nmembers(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_norm retrieves the mantissa normalization of a floating-point datatype.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ *
+ * @return a valid normalization type if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tget_norm(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tset_norm sets the mantissa normalization of a floating-point datatype.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to set.
+ * @param norm
+ * IN: Mantissa normalization type.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tset_norm(long type_id, int norm) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_offset retrieves the bit offset of the first significant bit.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ *
+ * @return a positive offset value if successful; otherwise 0.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tget_offset(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tset_offset sets the bit offset of the first significant bit.
+ *
+ * @param type_id
+ * Identifier of datatype to set.
+ * @param offset
+ * Offset of first significant bit.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static int H5Tset_offset(long type_id, int offset) throws HDF5LibraryException {
+ H5Tset_offset(type_id, (long) offset);
+ return 0;
+ }
+
+ /**
+ * H5Tset_offset sets the bit offset of the first significant bit.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to set.
+ * @param offset
+ * IN: Offset of first significant bit.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Tset_offset(long type_id, long offset) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_order returns the byte order of an atomic datatype.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ *
+ * @return a byte order constant if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tget_order(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tset_order sets the byte ordering of an atomic datatype.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to set.
+ * @param order
+ * IN: Byte ordering constant.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tset_order(long type_id, int order) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_pad retrieves the padding type of the least and most-significant bit padding.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ * @param pad
+ * OUT: locations to return least-significant and most-significant bit padding type.
+ *
+ * <pre>
+ * pad[0] = lsb // least-significant bit padding type
+ * pad[1] = msb // most-significant bit padding type
+ * </pre>
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - pad is null.
+ **/
+ public synchronized static native int H5Tget_pad(long type_id, int[] pad) throws HDF5LibraryException,
+ NullPointerException;
+
+ /**
+ * H5Tset_pad sets the least and most-significant bits padding types.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to set.
+ * @param lsb
+ * IN: Padding type for least-significant bits.
+ * @param msb
+ * IN: Padding type for most-significant bits.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tset_pad(long type_id, int lsb, int msb) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_precision returns the precision of an atomic datatype.
+ *
+ * @param type_id
+ * Identifier of datatype to query.
+ *
+ * @return the number of significant bits if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tget_precision(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tset_precision sets the precision of an atomic datatype.
+ *
+ * @param type_id
+ * Identifier of datatype to set.
+ * @param precision
+ * Number of bits of precision for datatype.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static int H5Tset_precision(long type_id, int precision) throws HDF5LibraryException {
+ H5Tset_precision(type_id, (long) precision);
+ return 0;
+ }
+
+ /**
+ * H5Tget_precision returns the precision of an atomic datatype.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ *
+ * @return the number of significant bits if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Tget_precision_long(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tset_precision sets the precision of an atomic datatype.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to set.
+ * @param precision
+ * IN: Number of bits of precision for datatype.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native void H5Tset_precision(long type_id, long precision) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_sign retrieves the sign type for an integer type.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ *
+ * @return a valid sign type if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tget_sign(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tset_sign sets the sign property for an integer type.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to set.
+ * @param sign
+ * IN: Sign type.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tset_sign(long type_id, int sign) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_size returns the size of a datatype in bytes.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ *
+ * @return the size of the datatype in bytes
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native long H5Tget_size(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tset_size sets the total size in bytes, size, for an atomic datatype (this operation is not permitted on
+ * compound datatypes).
+ *
+ * @param type_id
+ * IN: Identifier of datatype to change size.
+ * @param size
+ * IN: Size in bytes to modify datatype.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tset_size(long type_id, long size) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_strpad retrieves the string padding method for a string datatype.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ *
+ * @return a valid string padding type if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tget_strpad(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tset_strpad defines the storage mechanism for the string.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to modify.
+ * @param strpad
+ * IN: String padding type.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tset_strpad(long type_id, int strpad) throws HDF5LibraryException;
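+
+ // Illustrative sketch (not upstream code): defining a 32-byte, null-terminated ASCII string type
+ // with the size/cset/strpad setters documented above. H5T_C_S1, H5T_CSET_ASCII and
+ // H5T_STR_NULLTERM are assumed to come from HDF5Constants.
+ private static long exampleFixedString32() throws HDF5LibraryException {
+ long strType = H5Tcopy(HDF5Constants.H5T_C_S1);
+ H5Tset_size(strType, 32);
+ H5Tset_cset(strType, HDF5Constants.H5T_CSET_ASCII);
+ H5Tset_strpad(strType, HDF5Constants.H5T_STR_NULLTERM);
+ return strType; // caller is expected to H5Tclose the returned id
+ }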
+
+ /**
+ * H5Tget_super returns the type from which TYPE is derived.
+ *
+ * @param type
+ * IN: Identifier of datatype.
+ *
+ * @return the parent type
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Tget_super(long type) throws HDF5LibraryException {
+ long id = _H5Tget_super(type);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Tget_super add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Tget_super(long type) throws HDF5LibraryException;
+
+ /**
+ * H5Tget_tag returns the tag associated with datatype type_id.
+ *
+ * @param type
+ * IN: Identifier of datatype.
+ *
+ * @return the tag
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native String H5Tget_tag(long type) throws HDF5LibraryException;
+
+ /**
+ * H5Tset_tag tags an opaque datatype type_id with a unique ASCII identifier tag.
+ *
+ * @param type
+ * IN: Datatype identifier for the opaque datatype to be tagged.
+ * @param tag
+ * IN: Descriptive ASCII string with which the opaque datatype is to be tagged.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tset_tag(long type, String tag) throws HDF5LibraryException;
+
+ /**
+ * H5Tinsert adds another member to the compound datatype type_id.
+ *
+ * @param type_id
+ * IN: Identifier of compound datatype to modify.
+ * @param name
+ * IN: Name of the field to insert.
+ * @param offset
+ * IN: Offset in memory structure of the field to insert.
+ * @param field_id
+ * IN: Datatype identifier of the field to insert.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public synchronized static native int H5Tinsert(long type_id, String name, long offset, long field_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Tis_variable_str determines whether the datatype identified in type_id is a variable-length string.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to query.
+ *
+ * @return true if type_id is a variable-length string.
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native boolean H5Tis_variable_str(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tlock locks the datatype specified by the type_id identifier, making it read-only and non-destructible.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to lock.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tlock(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Topen opens a named datatype at the location specified by loc_id and returns an identifier for the datatype.
+ *
+ * @param loc_id
+ * IN: A file, group, or datatype identifier.
+ * @param name
+ * IN: A datatype name, defined within the file or group identified by loc_id.
+ * @param tapl_id
+ * IN: Datatype access property list.
+ *
+ * @return a named datatype identifier if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ * @exception NullPointerException
+ * - name is null.
+ **/
+ public static long H5Topen(long loc_id, String name, long tapl_id) throws HDF5LibraryException,
+ NullPointerException {
+ long id = _H5Topen2(loc_id, name, tapl_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Topen add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Topen2(long loc_id, String name, long tapl_id)
+ throws HDF5LibraryException, NullPointerException;
+
+ /**
+ * H5Tpack recursively removes padding from within a compound datatype to make it more efficient (space-wise) to
+ * store that data.
+ * <P>
+ * <b>WARNING:</b> This call only affects the C-data, even if it succeeds, there may be no visible effect on Java
+ * objects.
+ *
+ * @param type_id
+ * IN: Identifier of datatype to modify.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public synchronized static native int H5Tpack(long type_id) throws HDF5LibraryException;
+
+ /**
+ * H5Tvlen_create creates a new variable-length (VL) datatype.
+ *
+ * @param base_id
+ * IN: Identifier of parent datatype.
+ *
+ * @return a non-negative value if successful
+ *
+ * @exception HDF5LibraryException
+ * - Error from the HDF-5 Library.
+ **/
+ public static long H5Tvlen_create(long base_id) throws HDF5LibraryException {
+ long id = _H5Tvlen_create(base_id);
+ if (id > 0) {
+ log.trace("OPEN_IDS: H5Tvlen_create add {}", id);
+ OPEN_IDS.add(id);
+ log.trace("OPEN_IDS: {}", OPEN_IDS.size());
+ }
+ return id;
+ }
+
+ private synchronized static native long _H5Tvlen_create(long base_id) throws HDF5LibraryException;
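+
+ // Illustrative sketch (not upstream code): a variable-length sequence of native ints.
+ // H5T_NATIVE_INT is assumed to come from HDF5Constants.
+ private static long exampleVlenOfInt() throws HDF5LibraryException {
+ return H5Tvlen_create(HDF5Constants.H5T_NATIVE_INT); // caller is expected to H5Tclose it
+ }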
+
+ // /////// unimplemented ////////
+
+ // H5T_conv_t H5Tfind(int src_id, int dst_id, H5T_cdata_t *pcdata);
+
+ // public synchronized static native int H5Tregister(H5T_pers_t pers, String name, int src_id, int dst_id,
+ // H5T_conv_t func)
+ // throws HDF5LibraryException, NullPointerException;
+
+ // public synchronized static native int H5Tunregister(H5T_pers_t pers, String name, int src_id, int dst_id,
+ // H5T_conv_t func)
+ // throws HDF5LibraryException, NullPointerException;
+
+ // ////////////////////////////////////////////////////////////
+ // //
+ // H5Z: Filter Interface Functions //
+ // //
+ // ////////////////////////////////////////////////////////////
+
+ public synchronized static native int H5Zfilter_avail(int filter) throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native int H5Zget_filter_info(int filter) throws HDF5LibraryException;
+
+ public synchronized static native int H5Zunregister(int filter) throws HDF5LibraryException, NullPointerException;
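+
+    // Illustrative usage sketch: check that the deflate (gzip) filter is available
+    // and can encode before creating a compressed dataset.
+    //
+    //     int avail = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
+    //     int info  = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+    //     boolean usable = (avail > 0)
+    //             && (info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) != 0;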
+
+}
+
+// /////// unimplemented ////////
+
+// herr_t H5Zregister(const void *cls);
+
diff --git a/java/src/hdf/hdf5lib/HDF5Constants.java b/java/src/hdf/hdf5lib/HDF5Constants.java
new file mode 100644
index 0000000..8089544
--- /dev/null
+++ b/java/src/hdf/hdf5lib/HDF5Constants.java
@@ -0,0 +1,1877 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+
+package hdf.hdf5lib;
+
+/**
+ * This class contains the C constants and enumerated types of the HDF5 library.
+ * The values of these constants are obtained from the HDF5 library at load time
+ * by calling the private native methods declared at the end of this class.
+ * <P>
+ * <b>Do not edit this file!</b>
+ *
+ * <b>See also:</b> hdf.hdf5lib.HDF5Library
+ */
+public class HDF5Constants {
+ static {
+ H5.loadH5Lib();
+ }
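+
+    // Illustrative usage sketch (not part of this file): these constants are passed
+    // directly to the H5 wrapper calls; the file name below is hypothetical.
+    //
+    //     long file_id = H5.H5Fcreate("example.h5", HDF5Constants.H5F_ACC_TRUNC,
+    //             HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+    //     H5.H5Fclose(file_id);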
+
+ // /////////////////////////////////////////////////////////////////////////
+ // Get the HDF5 constants from the library //
+ // /////////////////////////////////////////////////////////////////////////
+
+ public static final long H5_QUARTER_HADDR_MAX = H5_QUARTER_HADDR_MAX();
+
+ public static final int H5_SZIP_MAX_PIXELS_PER_BLOCK = H5_SZIP_MAX_PIXELS_PER_BLOCK();
+ public static final int H5_SZIP_NN_OPTION_MASK = H5_SZIP_NN_OPTION_MASK();
+ public static final int H5_SZIP_EC_OPTION_MASK = H5_SZIP_EC_OPTION_MASK();
+ public static final int H5_SZIP_ALLOW_K13_OPTION_MASK = H5_SZIP_ALLOW_K13_OPTION_MASK();
+ public static final int H5_SZIP_CHIP_OPTION_MASK = H5_SZIP_CHIP_OPTION_MASK();
+ public static final int H5_INDEX_UNKNOWN = H5_INDEX_UNKNOWN();
+ public static final int H5_INDEX_NAME = H5_INDEX_NAME();
+ public static final int H5_INDEX_CRT_ORDER = H5_INDEX_CRT_ORDER();
+ public static final int H5_INDEX_N = H5_INDEX_N();
+ public static final int H5_ITER_UNKNOWN = H5_ITER_UNKNOWN();
+ public static final int H5_ITER_INC = H5_ITER_INC();
+ public static final int H5_ITER_DEC = H5_ITER_DEC();
+ public static final int H5_ITER_NATIVE = H5_ITER_NATIVE();
+ public static final int H5_ITER_N = H5_ITER_N();
+ public static final int H5AC_CURR_CACHE_CONFIG_VERSION = H5AC_CURR_CACHE_CONFIG_VERSION();
+ public static final int H5AC_MAX_TRACE_FILE_NAME_LEN = H5AC_MAX_TRACE_FILE_NAME_LEN();
+ public static final int H5AC_METADATA_WRITE_STRATEGY_PROCESS_ZERO_ONLY = H5AC_METADATA_WRITE_STRATEGY_PROCESS_ZERO_ONLY();
+ public static final int H5AC_METADATA_WRITE_STRATEGY_DISTRIBUTED = H5AC_METADATA_WRITE_STRATEGY_DISTRIBUTED();
+ public static final int H5C_incr_off = H5C_incr_off();
+ public static final int H5C_incr_threshold = H5C_incr_threshold();
+ public static final int H5C_flash_incr_off = H5C_flash_incr_off();
+ public static final int H5C_flash_incr_add_space = H5C_flash_incr_add_space();
+ public static final int H5C_decr_off = H5C_decr_off();
+ public static final int H5C_decr_threshold = H5C_decr_threshold();
+ public static final int H5C_decr_age_out = H5C_decr_age_out();
+ public static final int H5C_decr_age_out_with_threshold = H5C_decr_age_out_with_threshold();
+ public static final int H5D_CHUNK_IDX_BTREE = H5D_CHUNK_IDX_BTREE();
+ public static final int H5D_ALLOC_TIME_DEFAULT = H5D_ALLOC_TIME_DEFAULT();
+ public static final int H5D_ALLOC_TIME_EARLY = H5D_ALLOC_TIME_EARLY();
+ public static final int H5D_ALLOC_TIME_ERROR = H5D_ALLOC_TIME_ERROR();
+ public static final int H5D_ALLOC_TIME_INCR = H5D_ALLOC_TIME_INCR();
+ public static final int H5D_ALLOC_TIME_LATE = H5D_ALLOC_TIME_LATE();
+ public static final int H5D_FILL_TIME_ERROR = H5D_FILL_TIME_ERROR();
+ public static final int H5D_FILL_TIME_ALLOC = H5D_FILL_TIME_ALLOC();
+ public static final int H5D_FILL_TIME_NEVER = H5D_FILL_TIME_NEVER();
+ public static final int H5D_FILL_TIME_IFSET = H5D_FILL_TIME_IFSET();
+ public static final int H5D_FILL_VALUE_DEFAULT = H5D_FILL_VALUE_DEFAULT();
+ public static final int H5D_FILL_VALUE_ERROR = H5D_FILL_VALUE_ERROR();
+ public static final int H5D_FILL_VALUE_UNDEFINED = H5D_FILL_VALUE_UNDEFINED();
+ public static final int H5D_FILL_VALUE_USER_DEFINED = H5D_FILL_VALUE_USER_DEFINED();
+ public static final int H5D_LAYOUT_ERROR = H5D_LAYOUT_ERROR();
+ public static final int H5D_CHUNKED = H5D_CHUNKED();
+ public static final int H5D_COMPACT = H5D_COMPACT();
+ public static final int H5D_CONTIGUOUS = H5D_CONTIGUOUS();
+ public static final int H5D_VIRTUAL = H5D_VIRTUAL();
+ public static final int H5D_NLAYOUTS = H5D_NLAYOUTS();
+ public static final int H5D_SPACE_STATUS_ALLOCATED = H5D_SPACE_STATUS_ALLOCATED();
+ public static final int H5D_SPACE_STATUS_ERROR = H5D_SPACE_STATUS_ERROR();
+ public static final int H5D_SPACE_STATUS_NOT_ALLOCATED = H5D_SPACE_STATUS_NOT_ALLOCATED();
+ public static final int H5D_SPACE_STATUS_PART_ALLOCATED = H5D_SPACE_STATUS_PART_ALLOCATED();
+ public static final int H5D_VDS_ERROR = H5D_VDS_ERROR();
+ public static final int H5D_VDS_FIRST_MISSING = H5D_VDS_FIRST_MISSING();
+ public static final int H5D_VDS_LAST_AVAILABLE = H5D_VDS_LAST_AVAILABLE();
+
+ public static final int H5E_MAJOR = H5E_MAJOR();
+ public static final int H5E_MINOR = H5E_MINOR();
+ public static final long H5E_ALIGNMENT = H5E_ALIGNMENT();
+ public static final long H5E_ALREADYEXISTS = H5E_ALREADYEXISTS();
+ public static final long H5E_ALREADYINIT = H5E_ALREADYINIT();
+ public static final long H5E_ARGS = H5E_ARGS();
+ public static final long H5E_ATOM = H5E_ATOM();
+ public static final long H5E_ATTR = H5E_ATTR();
+ public static final long H5E_BADATOM = H5E_BADATOM();
+ public static final long H5E_BADFILE = H5E_BADFILE();
+ public static final long H5E_BADGROUP = H5E_BADGROUP();
+ public static final long H5E_BADMESG = H5E_BADMESG();
+ public static final long H5E_BADRANGE = H5E_BADRANGE();
+ public static final long H5E_BADSELECT = H5E_BADSELECT();
+ public static final long H5E_BADSIZE = H5E_BADSIZE();
+ public static final long H5E_BADTYPE = H5E_BADTYPE();
+ public static final long H5E_BADVALUE = H5E_BADVALUE();
+ public static final long H5E_BTREE = H5E_BTREE();
+ public static final long H5E_CACHE = H5E_CACHE();
+ public static final long H5E_CALLBACK = H5E_CALLBACK();
+ public static final long H5E_CANAPPLY = H5E_CANAPPLY();
+ // public static final long H5E_CANTALLOC = H5E_CANTALLOC();
+ public static final long H5E_CANTCLIP = H5E_CANTCLIP();
+ public static final long H5E_CANTCLOSEFILE = H5E_CANTCLOSEFILE();
+ public static final long H5E_CANTCONVERT = H5E_CANTCONVERT();
+ public static final long H5E_CANTCOPY = H5E_CANTCOPY();
+ public static final long H5E_CANTCOUNT = H5E_CANTCOUNT();
+ public static final long H5E_CANTCREATE = H5E_CANTCREATE();
+ public static final long H5E_CANTDEC = H5E_CANTDEC();
+ public static final long H5E_CANTDECODE = H5E_CANTDECODE();
+ public static final long H5E_CANTDELETE = H5E_CANTDELETE();
+ public static final long H5E_CANTENCODE = H5E_CANTENCODE();
+ public static final long H5E_CANTFLUSH = H5E_CANTFLUSH();
+ public static final long H5E_CANTFREE = H5E_CANTFREE();
+ public static final long H5E_CANTGET = H5E_CANTGET();
+ public static final long H5E_CANTINC = H5E_CANTINC();
+ public static final long H5E_CANTINIT = H5E_CANTINIT();
+ public static final long H5E_CANTINSERT = H5E_CANTINSERT();
+ public static final long H5E_CANTLIST = H5E_CANTLIST();
+ public static final long H5E_CANTLOAD = H5E_CANTLOAD();
+ public static final long H5E_CANTLOCK = H5E_CANTLOCK();
+ public static final long H5E_CANTNEXT = H5E_CANTNEXT();
+ public static final long H5E_CANTOPENFILE = H5E_CANTOPENFILE();
+ public static final long H5E_CANTOPENOBJ = H5E_CANTOPENOBJ();
+ // public static final long H5E_CANTRECV = H5E_CANTRECV();
+ public static final long H5E_CANTREGISTER = H5E_CANTREGISTER();
+ public static final long H5E_CANTRELEASE = H5E_CANTRELEASE();
+ public static final long H5E_CANTSELECT = H5E_CANTSELECT();
+ public static final long H5E_CANTSET = H5E_CANTSET();
+ public static final long H5E_CANTSPLIT = H5E_CANTSPLIT();
+ public static final long H5E_CANTUNLOCK = H5E_CANTUNLOCK();
+ public static final long H5E_CLOSEERROR = H5E_CLOSEERROR();
+ public static final long H5E_COMPLEN = H5E_COMPLEN();
+ public static final long H5E_DATASET = H5E_DATASET();
+ public static final long H5E_DATASPACE = H5E_DATASPACE();
+ public static final long H5E_DATATYPE = H5E_DATATYPE();
+ public static final long H5E_DEFAULT = H5E_DEFAULT();
+ public static final long H5E_DUPCLASS = H5E_DUPCLASS();
+ public static final long H5E_EFL = H5E_EFL();
+ public static final long H5E_EXISTS = H5E_EXISTS();
+ public static final long H5E_FCNTL = H5E_FCNTL();
+ public static final long H5E_FILE = H5E_FILE();
+ public static final long H5E_FILEEXISTS = H5E_FILEEXISTS();
+ public static final long H5E_FILEOPEN = H5E_FILEOPEN();
+ public static final long H5E_FUNC = H5E_FUNC();
+ public static final long H5E_HEAP = H5E_HEAP();
+ public static final long H5E_INTERNAL = H5E_INTERNAL();
+ public static final long H5E_IO = H5E_IO();
+ public static final long H5E_LINK = H5E_LINK();
+ public static final long H5E_LINKCOUNT = H5E_LINKCOUNT();
+ public static final long H5E_MOUNT = H5E_MOUNT();
+ public static final long H5E_MPI = H5E_MPI();
+ public static final long H5E_MPIERRSTR = H5E_MPIERRSTR();
+ public static final long H5E_NOFILTER = H5E_NOFILTER();
+ public static final long H5E_NOIDS = H5E_NOIDS();
+ public static final long H5E_NONE_MAJOR = H5E_NONE_MAJOR();
+ public static final long H5E_NONE_MINOR = H5E_NONE_MINOR();
+ public static final long H5E_NOSPACE = H5E_NOSPACE();
+ public static final long H5E_NOTCACHED = H5E_NOTCACHED();
+ public static final long H5E_NOTFOUND = H5E_NOTFOUND();
+ public static final long H5E_NOTHDF5 = H5E_NOTHDF5();
+ public static final long H5E_OHDR = H5E_OHDR();
+ public static final long H5E_OVERFLOW = H5E_OVERFLOW();
+ public static final long H5E_PLINE = H5E_PLINE();
+ public static final long H5E_PLIST = H5E_PLIST();
+ public static final long H5E_PROTECT = H5E_PROTECT();
+ public static final long H5E_READERROR = H5E_READERROR();
+ public static final long H5E_REFERENCE = H5E_REFERENCE();
+ public static final long H5E_RESOURCE = H5E_RESOURCE();
+ public static final long H5E_RS = H5E_RS();
+ public static final long H5E_SEEKERROR = H5E_SEEKERROR();
+ public static final long H5E_SETLOCAL = H5E_SETLOCAL();
+ public static final long H5E_STORAGE = H5E_STORAGE();
+ public static final long H5E_SYM = H5E_SYM();
+ public static final long H5E_TRUNCATED = H5E_TRUNCATED();
+ public static final long H5E_TST = H5E_TST();
+ public static final long H5E_UNINITIALIZED = H5E_UNINITIALIZED();
+ public static final long H5E_UNSUPPORTED = H5E_UNSUPPORTED();
+ public static final long H5E_VERSION = H5E_VERSION();
+ public static final long H5E_VFL = H5E_VFL();
+ public static final long H5E_WALK_DOWNWARD = H5E_WALK_DOWNWARD();
+ public static final long H5E_WALK_UPWARD = H5E_WALK_UPWARD();
+ public static final long H5E_WRITEERROR = H5E_WRITEERROR();
+
+ public static final int H5F_ACC_CREAT = H5F_ACC_CREAT();
+ public static final int H5F_ACC_EXCL = H5F_ACC_EXCL();
+ public static final int H5F_ACC_RDONLY = H5F_ACC_RDONLY();
+ public static final int H5F_ACC_RDWR = H5F_ACC_RDWR();
+ public static final int H5F_ACC_TRUNC = H5F_ACC_TRUNC();
+ public static final int H5F_ACC_DEFAULT = H5F_ACC_DEFAULT();
+ public static final int H5F_CLOSE_DEFAULT = H5F_CLOSE_DEFAULT();
+ public static final int H5F_CLOSE_SEMI = H5F_CLOSE_SEMI();
+ public static final int H5F_CLOSE_STRONG = H5F_CLOSE_STRONG();
+ public static final int H5F_CLOSE_WEAK = H5F_CLOSE_WEAK();
+ public static final int H5F_LIBVER_EARLIEST = H5F_LIBVER_EARLIEST();
+ public static final int H5F_LIBVER_LATEST = H5F_LIBVER_LATEST();
+ public static final int H5F_OBJ_ALL = H5F_OBJ_ALL();
+ public static final int H5F_OBJ_ATTR = H5F_OBJ_ATTR();
+ public static final int H5F_OBJ_DATASET = H5F_OBJ_DATASET();
+ public static final int H5F_OBJ_DATATYPE = H5F_OBJ_DATATYPE();
+ public static final int H5F_OBJ_FILE = H5F_OBJ_FILE();
+ public static final int H5F_OBJ_GROUP = H5F_OBJ_GROUP();
+ public static final int H5F_OBJ_LOCAL = H5F_OBJ_LOCAL();
+ public static final int H5F_SCOPE_GLOBAL = H5F_SCOPE_GLOBAL();
+ public static final int H5F_SCOPE_LOCAL = H5F_SCOPE_LOCAL();
+ public static final int H5F_UNLIMITED = H5F_UNLIMITED();
+ public static final int H5F_FILE_SPACE_DEFAULT = H5F_FILE_SPACE_DEFAULT();
+ public static final int H5F_FILE_SPACE_ALL_PERSIST = H5F_FILE_SPACE_ALL_PERSIST();
+ public static final int H5F_FILE_SPACE_ALL = H5F_FILE_SPACE_ALL();
+ public static final int H5F_FILE_SPACE_AGGR_VFD = H5F_FILE_SPACE_AGGR_VFD();
+ public static final int H5F_FILE_SPACE_VFD = H5F_FILE_SPACE_VFD();
+ public static final int H5F_FILE_SPACE_NTYPES = H5F_FILE_SPACE_NTYPES();
+
+ public static final long H5FD_CORE = H5FD_CORE();
+ public static final long H5FD_DIRECT = H5FD_DIRECT();
+ public static final long H5FD_FAMILY = H5FD_FAMILY();
+ public static final long H5FD_LOG = H5FD_LOG();
+ public static final long H5FD_MPIO = H5FD_MPIO();
+ public static final long H5FD_MULTI = H5FD_MULTI();
+ public static final long H5FD_SEC2 = H5FD_SEC2();
+ public static final long H5FD_STDIO = H5FD_STDIO();
+ public static final long H5FD_WINDOWS = H5FD_WINDOWS();
+ public static final int H5FD_LOG_LOC_READ = H5FD_LOG_LOC_READ();
+ public static final int H5FD_LOG_LOC_WRITE = H5FD_LOG_LOC_WRITE();
+ public static final int H5FD_LOG_LOC_SEEK = H5FD_LOG_LOC_SEEK();
+ public static final int H5FD_LOG_LOC_IO = H5FD_LOG_LOC_IO();
+ public static final int H5FD_LOG_FILE_READ = H5FD_LOG_FILE_READ();
+ public static final int H5FD_LOG_FILE_WRITE = H5FD_LOG_FILE_WRITE();
+ public static final int H5FD_LOG_FILE_IO = H5FD_LOG_FILE_IO();
+ public static final int H5FD_LOG_FLAVOR = H5FD_LOG_FLAVOR();
+ public static final int H5FD_LOG_NUM_READ = H5FD_LOG_NUM_READ();
+ public static final int H5FD_LOG_NUM_WRITE = H5FD_LOG_NUM_WRITE();
+ public static final int H5FD_LOG_NUM_SEEK = H5FD_LOG_NUM_SEEK();
+ public static final int H5FD_LOG_NUM_TRUNCATE = H5FD_LOG_NUM_TRUNCATE();
+ public static final int H5FD_LOG_NUM_IO = H5FD_LOG_NUM_IO();
+ public static final int H5FD_LOG_TIME_OPEN = H5FD_LOG_TIME_OPEN();
+ public static final int H5FD_LOG_TIME_STAT = H5FD_LOG_TIME_STAT();
+ public static final int H5FD_LOG_TIME_READ = H5FD_LOG_TIME_READ();
+ public static final int H5FD_LOG_TIME_WRITE = H5FD_LOG_TIME_WRITE();
+ public static final int H5FD_LOG_TIME_SEEK = H5FD_LOG_TIME_SEEK();
+ public static final int H5FD_LOG_TIME_CLOSE = H5FD_LOG_TIME_CLOSE();
+ public static final int H5FD_LOG_TIME_IO = H5FD_LOG_TIME_IO();
+ public static final int H5FD_LOG_ALLOC = H5FD_LOG_ALLOC();
+ public static final int H5FD_LOG_ALL = H5FD_LOG_ALL();
+ public static final int H5FD_MEM_NOLIST = H5FD_MEM_NOLIST();
+ public static final int H5FD_MEM_DEFAULT = H5FD_MEM_DEFAULT();
+ public static final int H5FD_MEM_SUPER = H5FD_MEM_SUPER();
+ public static final int H5FD_MEM_BTREE = H5FD_MEM_BTREE();
+ public static final int H5FD_MEM_DRAW = H5FD_MEM_DRAW();
+ public static final int H5FD_MEM_GHEAP = H5FD_MEM_GHEAP();
+ public static final int H5FD_MEM_LHEAP = H5FD_MEM_LHEAP();
+ public static final int H5FD_MEM_OHDR = H5FD_MEM_OHDR();
+ public static final int H5FD_MEM_NTYPES = H5FD_MEM_NTYPES();
+ public static final long H5FD_DEFAULT_HADDR_SIZE = H5FD_DEFAULT_HADDR_SIZE();
+ public static final long H5FD_MEM_DEFAULT_SIZE = H5FD_MEM_DEFAULT_SIZE();
+ public static final long H5FD_MEM_DEFAULT_SUPER_SIZE = H5FD_MEM_DEFAULT_SUPER_SIZE();
+ public static final long H5FD_MEM_DEFAULT_BTREE_SIZE = H5FD_MEM_DEFAULT_BTREE_SIZE();
+ public static final long H5FD_MEM_DEFAULT_DRAW_SIZE = H5FD_MEM_DEFAULT_DRAW_SIZE();
+ public static final long H5FD_MEM_DEFAULT_GHEAP_SIZE = H5FD_MEM_DEFAULT_GHEAP_SIZE();
+ public static final long H5FD_MEM_DEFAULT_LHEAP_SIZE = H5FD_MEM_DEFAULT_LHEAP_SIZE();
+ public static final long H5FD_MEM_DEFAULT_OHDR_SIZE = H5FD_MEM_DEFAULT_OHDR_SIZE();
+
+// public static final int H5G_DATASET = H5G_DATASET();
+// public static final int H5G_GROUP = H5G_GROUP();
+// public static final int H5G_LINK = H5G_LINK();
+// public static final int H5G_UDLINK = H5G_UDLINK();
+// public static final int H5G_LINK_ERROR = H5G_LINK_ERROR();
+// public static final int H5G_LINK_HARD = H5G_LINK_HARD();
+// public static final int H5G_LINK_SOFT = H5G_LINK_SOFT();
+// public static final int H5G_NLIBTYPES = H5G_NLIBTYPES();
+// public static final int H5G_NTYPES = H5G_NTYPES();
+// public static final int H5G_NUSERTYPES = H5G_NUSERTYPES();
+// public static final int H5G_RESERVED_5 = H5G_RESERVED_5();
+// public static final int H5G_RESERVED_6 = H5G_RESERVED_6();
+// public static final int H5G_RESERVED_7 = H5G_RESERVED_7();
+// public static final int H5G_SAME_LOC = H5G_SAME_LOC();
+// public static final int H5G_TYPE = H5G_TYPE();
+// public static final int H5G_UNKNOWN = H5G_UNKNOWN();
+
+ public static final int H5G_STORAGE_TYPE_UNKNOWN = H5G_STORAGE_TYPE_UNKNOWN();
+ public static final int H5G_STORAGE_TYPE_SYMBOL_TABLE = H5G_STORAGE_TYPE_SYMBOL_TABLE();
+ public static final int H5G_STORAGE_TYPE_COMPACT = H5G_STORAGE_TYPE_COMPACT();
+ public static final int H5G_STORAGE_TYPE_DENSE = H5G_STORAGE_TYPE_DENSE();
+
+ public static final int H5I_ATTR = H5I_ATTR();
+ public static final int H5I_BADID = H5I_BADID();
+ public static final int H5I_DATASET = H5I_DATASET();
+ public static final int H5I_DATASPACE = H5I_DATASPACE();
+ public static final int H5I_DATATYPE = H5I_DATATYPE();
+ public static final int H5I_ERROR_CLASS = H5I_ERROR_CLASS();
+ public static final int H5I_ERROR_MSG = H5I_ERROR_MSG();
+ public static final int H5I_ERROR_STACK = H5I_ERROR_STACK();
+ public static final int H5I_FILE = H5I_FILE();
+ public static final int H5I_GENPROP_CLS = H5I_GENPROP_CLS();
+ public static final int H5I_GENPROP_LST = H5I_GENPROP_LST();
+ public static final int H5I_GROUP = H5I_GROUP();
+ public static final int H5I_INVALID_HID = H5I_INVALID_HID();
+ public static final int H5I_NTYPES = H5I_NTYPES();
+ public static final int H5I_REFERENCE = H5I_REFERENCE();
+ public static final int H5I_UNINIT = H5I_UNINIT();
+ public static final int H5I_VFL = H5I_VFL();
+
+ public static final int H5L_TYPE_ERROR = H5L_TYPE_ERROR();
+ public static final int H5L_TYPE_HARD = H5L_TYPE_HARD();
+ public static final int H5L_TYPE_SOFT = H5L_TYPE_SOFT();
+ public static final int H5L_TYPE_EXTERNAL = H5L_TYPE_EXTERNAL();
+ public static final int H5L_TYPE_MAX = H5L_TYPE_MAX();
+
+ public static final int H5O_COPY_SHALLOW_HIERARCHY_FLAG = H5O_COPY_SHALLOW_HIERARCHY_FLAG();
+ public static final int H5O_COPY_EXPAND_SOFT_LINK_FLAG = H5O_COPY_EXPAND_SOFT_LINK_FLAG();
+ public static final int H5O_COPY_EXPAND_EXT_LINK_FLAG = H5O_COPY_EXPAND_EXT_LINK_FLAG();
+ public static final int H5O_COPY_EXPAND_REFERENCE_FLAG = H5O_COPY_EXPAND_REFERENCE_FLAG();
+ public static final int H5O_COPY_WITHOUT_ATTR_FLAG = H5O_COPY_WITHOUT_ATTR_FLAG();
+ public static final int H5O_COPY_PRESERVE_NULL_FLAG = H5O_COPY_PRESERVE_NULL_FLAG();
+ public static final int H5O_SHMESG_NONE_FLAG = H5O_SHMESG_NONE_FLAG();
+ public static final int H5O_SHMESG_SDSPACE_FLAG = H5O_SHMESG_SDSPACE_FLAG();
+ public static final int H5O_SHMESG_DTYPE_FLAG = H5O_SHMESG_DTYPE_FLAG();
+ public static final int H5O_SHMESG_FILL_FLAG = H5O_SHMESG_FILL_FLAG();
+ public static final int H5O_SHMESG_PLINE_FLAG = H5O_SHMESG_PLINE_FLAG();
+ public static final int H5O_SHMESG_ATTR_FLAG = H5O_SHMESG_ATTR_FLAG();
+ public static final int H5O_SHMESG_ALL_FLAG = H5O_SHMESG_ALL_FLAG();
+ public static final int H5O_TYPE_UNKNOWN = H5O_TYPE_UNKNOWN();
+ public static final int H5O_TYPE_GROUP = H5O_TYPE_GROUP();
+ public static final int H5O_TYPE_DATASET = H5O_TYPE_DATASET();
+ public static final int H5O_TYPE_NAMED_DATATYPE = H5O_TYPE_NAMED_DATATYPE();
+ public static final int H5O_TYPE_NTYPES = H5O_TYPE_NTYPES();
+
+ public static final long H5P_ROOT = H5P_ROOT();
+ public static final long H5P_OBJECT_CREATE = H5P_OBJECT_CREATE();
+ public static final long H5P_FILE_CREATE = H5P_FILE_CREATE();
+ public static final long H5P_FILE_ACCESS = H5P_FILE_ACCESS();
+ public static final long H5P_DATASET_CREATE = H5P_DATASET_CREATE();
+ public static final long H5P_DATASET_ACCESS = H5P_DATASET_ACCESS();
+ public static final long H5P_DATASET_XFER = H5P_DATASET_XFER();
+ public static final long H5P_FILE_MOUNT = H5P_FILE_MOUNT();
+ public static final long H5P_GROUP_CREATE = H5P_GROUP_CREATE();
+ public static final long H5P_GROUP_ACCESS = H5P_GROUP_ACCESS();
+ public static final long H5P_DATATYPE_CREATE = H5P_DATATYPE_CREATE();
+ public static final long H5P_DATATYPE_ACCESS = H5P_DATATYPE_ACCESS();
+ public static final long H5P_STRING_CREATE = H5P_STRING_CREATE();
+ public static final long H5P_ATTRIBUTE_CREATE = H5P_ATTRIBUTE_CREATE();
+ public static final long H5P_ATTRIBUTE_ACCESS = H5P_ATTRIBUTE_ACCESS();
+ public static final long H5P_OBJECT_COPY = H5P_OBJECT_COPY();
+ public static final long H5P_LINK_CREATE = H5P_LINK_CREATE();
+ public static final long H5P_LINK_ACCESS = H5P_LINK_ACCESS();
+ public static final long H5P_FILE_CREATE_DEFAULT = H5P_FILE_CREATE_DEFAULT();
+ public static final long H5P_FILE_ACCESS_DEFAULT = H5P_FILE_ACCESS_DEFAULT();
+ public static final long H5P_DATASET_CREATE_DEFAULT = H5P_DATASET_CREATE_DEFAULT();
+ public static final long H5P_DATASET_ACCESS_DEFAULT = H5P_DATASET_ACCESS_DEFAULT();
+ public static final long H5P_DATASET_XFER_DEFAULT = H5P_DATASET_XFER_DEFAULT();
+ public static final long H5P_FILE_MOUNT_DEFAULT = H5P_FILE_MOUNT_DEFAULT();
+ public static final long H5P_GROUP_CREATE_DEFAULT = H5P_GROUP_CREATE_DEFAULT();
+ public static final long H5P_GROUP_ACCESS_DEFAULT = H5P_GROUP_ACCESS_DEFAULT();
+ public static final long H5P_DATATYPE_CREATE_DEFAULT = H5P_DATATYPE_CREATE_DEFAULT();
+ public static final long H5P_DATATYPE_ACCESS_DEFAULT = H5P_DATATYPE_ACCESS_DEFAULT();
+ public static final long H5P_ATTRIBUTE_CREATE_DEFAULT = H5P_ATTRIBUTE_CREATE_DEFAULT();
+ public static final long H5P_ATTRIBUTE_ACCESS_DEFAULT = H5P_ATTRIBUTE_ACCESS_DEFAULT();
+ public static final long H5P_OBJECT_COPY_DEFAULT = H5P_OBJECT_COPY_DEFAULT();
+ public static final long H5P_LINK_CREATE_DEFAULT = H5P_LINK_CREATE_DEFAULT();
+ public static final long H5P_LINK_ACCESS_DEFAULT = H5P_LINK_ACCESS_DEFAULT();
+ public static final int H5P_CRT_ORDER_TRACKED = H5P_CRT_ORDER_TRACKED();
+ public static final int H5P_CRT_ORDER_INDEXED = H5P_CRT_ORDER_INDEXED();
+ public static final long H5P_DEFAULT = H5P_DEFAULT();
+
+ public static final int H5PL_TYPE_ERROR = H5PL_TYPE_ERROR();
+ public static final int H5PL_TYPE_FILTER = H5PL_TYPE_FILTER();
+ public static final int H5PL_FILTER_PLUGIN = H5PL_FILTER_PLUGIN();
+ public static final int H5PL_ALL_PLUGIN = H5PL_ALL_PLUGIN();
+
+ public static final int H5R_BADTYPE = H5R_BADTYPE();
+ public static final int H5R_DATASET_REGION = H5R_DATASET_REGION();
+ public static final int H5R_MAXTYPE = H5R_MAXTYPE();
+ public static final int H5R_OBJ_REF_BUF_SIZE = H5R_OBJ_REF_BUF_SIZE();
+ public static final int H5R_OBJECT = H5R_OBJECT();
+ public static final int H5S_ALL = H5S_ALL();
+ public static final int H5S_MAX_RANK = H5S_MAX_RANK();
+ public static final int H5S_NO_CLASS = H5S_NO_CLASS();
+ public static final int H5S_NULL = H5S_NULL();
+ public static final int H5S_SCALAR = H5S_SCALAR();
+ public static final int H5S_SEL_ALL = H5S_SEL_ALL();
+ public static final int H5S_SEL_ERROR = H5S_SEL_ERROR();
+ public static final int H5S_SEL_HYPERSLABS = H5S_SEL_HYPERSLABS();
+ public static final int H5S_SEL_N = H5S_SEL_N();
+ public static final int H5S_SEL_NONE = H5S_SEL_NONE();
+ public static final int H5S_SEL_POINTS = H5S_SEL_POINTS();
+ public static final int H5S_SELECT_AND = H5S_SELECT_AND();
+ public static final int H5S_SELECT_APPEND = H5S_SELECT_APPEND();
+ public static final int H5S_SELECT_INVALID = H5S_SELECT_INVALID();
+ public static final int H5S_SELECT_NOOP = H5S_SELECT_NOOP();
+ public static final int H5S_SELECT_NOTA = H5S_SELECT_NOTA();
+ public static final int H5S_SELECT_NOTB = H5S_SELECT_NOTB();
+ public static final int H5S_SELECT_OR = H5S_SELECT_OR();
+ public static final int H5S_SELECT_PREPEND = H5S_SELECT_PREPEND();
+ public static final int H5S_SELECT_SET = H5S_SELECT_SET();
+ public static final int H5S_SELECT_XOR = H5S_SELECT_XOR();
+ public static final int H5S_SIMPLE = H5S_SIMPLE();
+ public static final int H5S_UNLIMITED = H5S_UNLIMITED();
+ public static final long H5T_ALPHA_B16 = H5T_ALPHA_B16();
+ public static final long H5T_ALPHA_B32 = H5T_ALPHA_B32();
+ public static final long H5T_ALPHA_B64 = H5T_ALPHA_B64();
+ public static final long H5T_ALPHA_B8 = H5T_ALPHA_B8();
+ public static final long H5T_ALPHA_F32 = H5T_ALPHA_F32();
+ public static final long H5T_ALPHA_F64 = H5T_ALPHA_F64();
+ public static final long H5T_ALPHA_I16 = H5T_ALPHA_I16();
+ public static final long H5T_ALPHA_I32 = H5T_ALPHA_I32();
+ public static final long H5T_ALPHA_I64 = H5T_ALPHA_I64();
+ public static final long H5T_ALPHA_I8 = H5T_ALPHA_I8();
+ public static final long H5T_ALPHA_U16 = H5T_ALPHA_U16();
+ public static final long H5T_ALPHA_U32 = H5T_ALPHA_U32();
+ public static final long H5T_ALPHA_U64 = H5T_ALPHA_U64();
+ public static final long H5T_ALPHA_U8 = H5T_ALPHA_U8();
+ public static final int H5T_ARRAY = H5T_ARRAY();
+ public static final int H5T_BITFIELD = H5T_BITFIELD();
+ public static final int H5T_BKG_NO = H5T_BKG_NO();
+ public static final int H5T_BKG_YES = H5T_BKG_YES();
+ public static final long H5T_C_S1 = H5T_C_S1();
+ public static final int H5T_COMPOUND = H5T_COMPOUND();
+ public static final int H5T_CONV_CONV = H5T_CONV_CONV();
+ public static final int H5T_CONV_FREE = H5T_CONV_FREE();
+ public static final int H5T_CONV_INIT = H5T_CONV_INIT();
+ public static final int H5T_CSET_ERROR = H5T_CSET_ERROR();
+ public static final int H5T_CSET_ASCII = H5T_CSET_ASCII();
+ public static final int H5T_CSET_UTF8 = H5T_CSET_UTF8();
+ public static final int H5T_CSET_RESERVED_10 = H5T_CSET_RESERVED_10();
+ public static final int H5T_CSET_RESERVED_11 = H5T_CSET_RESERVED_11();
+ public static final int H5T_CSET_RESERVED_12 = H5T_CSET_RESERVED_12();
+ public static final int H5T_CSET_RESERVED_13 = H5T_CSET_RESERVED_13();
+ public static final int H5T_CSET_RESERVED_14 = H5T_CSET_RESERVED_14();
+ public static final int H5T_CSET_RESERVED_15 = H5T_CSET_RESERVED_15();
+ public static final int H5T_CSET_RESERVED_2 = H5T_CSET_RESERVED_2();
+ public static final int H5T_CSET_RESERVED_3 = H5T_CSET_RESERVED_3();
+ public static final int H5T_CSET_RESERVED_4 = H5T_CSET_RESERVED_4();
+ public static final int H5T_CSET_RESERVED_5 = H5T_CSET_RESERVED_5();
+ public static final int H5T_CSET_RESERVED_6 = H5T_CSET_RESERVED_6();
+ public static final int H5T_CSET_RESERVED_7 = H5T_CSET_RESERVED_7();
+ public static final int H5T_CSET_RESERVED_8 = H5T_CSET_RESERVED_8();
+ public static final int H5T_CSET_RESERVED_9 = H5T_CSET_RESERVED_9();
+ public static final int H5T_DIR_ASCEND = H5T_DIR_ASCEND();
+ public static final int H5T_DIR_DEFAULT = H5T_DIR_DEFAULT();
+ public static final int H5T_DIR_DESCEND = H5T_DIR_DESCEND();
+ public static final int H5T_ENUM = H5T_ENUM();
+ public static final int H5T_FLOAT = H5T_FLOAT();
+ public static final long H5T_FORTRAN_S1 = H5T_FORTRAN_S1();
+ public static final long H5T_IEEE_F32BE = H5T_IEEE_F32BE();
+ public static final long H5T_IEEE_F32LE = H5T_IEEE_F32LE();
+ public static final long H5T_IEEE_F64BE = H5T_IEEE_F64BE();
+ public static final long H5T_IEEE_F64LE = H5T_IEEE_F64LE();
+ public static final int H5T_INTEGER = H5T_INTEGER();
+ public static final long H5T_INTEL_B16 = H5T_INTEL_B16();
+ public static final long H5T_INTEL_B32 = H5T_INTEL_B32();
+ public static final long H5T_INTEL_B64 = H5T_INTEL_B64();
+ public static final long H5T_INTEL_B8 = H5T_INTEL_B8();
+ public static final long H5T_INTEL_F32 = H5T_INTEL_F32();
+ public static final long H5T_INTEL_F64 = H5T_INTEL_F64();
+ public static final long H5T_INTEL_I16 = H5T_INTEL_I16();
+ public static final long H5T_INTEL_I32 = H5T_INTEL_I32();
+ public static final long H5T_INTEL_I64 = H5T_INTEL_I64();
+ public static final long H5T_INTEL_I8 = H5T_INTEL_I8();
+ public static final long H5T_INTEL_U16 = H5T_INTEL_U16();
+ public static final long H5T_INTEL_U32 = H5T_INTEL_U32();
+ public static final long H5T_INTEL_U64 = H5T_INTEL_U64();
+ public static final long H5T_INTEL_U8 = H5T_INTEL_U8();
+ public static final long H5T_MIPS_B16 = H5T_MIPS_B16();
+ public static final long H5T_MIPS_B32 = H5T_MIPS_B32();
+ public static final long H5T_MIPS_B64 = H5T_MIPS_B64();
+ public static final long H5T_MIPS_B8 = H5T_MIPS_B8();
+ public static final long H5T_MIPS_F32 = H5T_MIPS_F32();
+ public static final long H5T_MIPS_F64 = H5T_MIPS_F64();
+ public static final long H5T_MIPS_I16 = H5T_MIPS_I16();
+ public static final long H5T_MIPS_I32 = H5T_MIPS_I32();
+ public static final long H5T_MIPS_I64 = H5T_MIPS_I64();
+ public static final long H5T_MIPS_I8 = H5T_MIPS_I8();
+ public static final long H5T_MIPS_U16 = H5T_MIPS_U16();
+ public static final long H5T_MIPS_U32 = H5T_MIPS_U32();
+ public static final long H5T_MIPS_U64 = H5T_MIPS_U64();
+ public static final long H5T_MIPS_U8 = H5T_MIPS_U8();
+ public static final long H5T_NATIVE_B16 = H5T_NATIVE_B16();
+ public static final long H5T_NATIVE_B32 = H5T_NATIVE_B32();
+ public static final long H5T_NATIVE_B64 = H5T_NATIVE_B64();
+ public static final long H5T_NATIVE_B8 = H5T_NATIVE_B8();
+ public static final long H5T_NATIVE_CHAR = H5T_NATIVE_CHAR();
+ public static final long H5T_NATIVE_DOUBLE = H5T_NATIVE_DOUBLE();
+ public static final long H5T_NATIVE_FLOAT = H5T_NATIVE_FLOAT();
+ public static final long H5T_NATIVE_HADDR = H5T_NATIVE_HADDR();
+ public static final long H5T_NATIVE_HBOOL = H5T_NATIVE_HBOOL();
+ public static final long H5T_NATIVE_HERR = H5T_NATIVE_HERR();
+ public static final long H5T_NATIVE_HSIZE = H5T_NATIVE_HSIZE();
+ public static final long H5T_NATIVE_HSSIZE = H5T_NATIVE_HSSIZE();
+ public static final long H5T_NATIVE_INT = H5T_NATIVE_INT();
+ public static final long H5T_NATIVE_INT_FAST16 = H5T_NATIVE_INT_FAST16();
+ public static final long H5T_NATIVE_INT_FAST32 = H5T_NATIVE_INT_FAST32();
+ public static final long H5T_NATIVE_INT_FAST64 = H5T_NATIVE_INT_FAST64();
+ public static final long H5T_NATIVE_INT_FAST8 = H5T_NATIVE_INT_FAST8();
+ public static final long H5T_NATIVE_INT_LEAST16 = H5T_NATIVE_INT_LEAST16();
+ public static final long H5T_NATIVE_INT_LEAST32 = H5T_NATIVE_INT_LEAST32();
+ public static final long H5T_NATIVE_INT_LEAST64 = H5T_NATIVE_INT_LEAST64();
+ public static final long H5T_NATIVE_INT_LEAST8 = H5T_NATIVE_INT_LEAST8();
+ public static final long H5T_NATIVE_INT16 = H5T_NATIVE_INT16();
+ public static final long H5T_NATIVE_INT32 = H5T_NATIVE_INT32();
+ public static final long H5T_NATIVE_INT64 = H5T_NATIVE_INT64();
+ public static final long H5T_NATIVE_INT8 = H5T_NATIVE_INT8();
+ public static final long H5T_NATIVE_LDOUBLE = H5T_NATIVE_LDOUBLE();
+ public static final long H5T_NATIVE_LLONG = H5T_NATIVE_LLONG();
+ public static final long H5T_NATIVE_LONG = H5T_NATIVE_LONG();
+ public static final long H5T_NATIVE_OPAQUE = H5T_NATIVE_OPAQUE();
+ public static final long H5T_NATIVE_SCHAR = H5T_NATIVE_SCHAR();
+ public static final long H5T_NATIVE_SHORT = H5T_NATIVE_SHORT();
+ public static final long H5T_NATIVE_UCHAR = H5T_NATIVE_UCHAR();
+ public static final long H5T_NATIVE_UINT = H5T_NATIVE_UINT();
+ public static final long H5T_NATIVE_UINT_FAST16 = H5T_NATIVE_UINT_FAST16();
+ public static final long H5T_NATIVE_UINT_FAST32 = H5T_NATIVE_UINT_FAST32();
+ public static final long H5T_NATIVE_UINT_FAST64 = H5T_NATIVE_UINT_FAST64();
+ public static final long H5T_NATIVE_UINT_FAST8 = H5T_NATIVE_UINT_FAST8();
+ public static final long H5T_NATIVE_UINT_LEAST16 = H5T_NATIVE_UINT_LEAST16();
+ public static final long H5T_NATIVE_UINT_LEAST32 = H5T_NATIVE_UINT_LEAST32();
+ public static final long H5T_NATIVE_UINT_LEAST64 = H5T_NATIVE_UINT_LEAST64();
+ public static final long H5T_NATIVE_UINT_LEAST8 = H5T_NATIVE_UINT_LEAST8();
+ public static final long H5T_NATIVE_UINT16 = H5T_NATIVE_UINT16();
+ public static final long H5T_NATIVE_UINT32 = H5T_NATIVE_UINT32();
+ public static final long H5T_NATIVE_UINT64 = H5T_NATIVE_UINT64();
+ public static final long H5T_NATIVE_UINT8 = H5T_NATIVE_UINT8();
+ public static final long H5T_NATIVE_ULLONG = H5T_NATIVE_ULLONG();
+ public static final long H5T_NATIVE_ULONG = H5T_NATIVE_ULONG();
+ public static final long H5T_NATIVE_USHORT = H5T_NATIVE_USHORT();
+ public static final int H5T_NCLASSES = H5T_NCLASSES();
+ public static final int H5T_NO_CLASS = H5T_NO_CLASS();
+ public static final int H5T_NORM_ERROR = H5T_NORM_ERROR();
+ public static final int H5T_NORM_IMPLIED = H5T_NORM_IMPLIED();
+ public static final int H5T_NORM_MSBSET = H5T_NORM_MSBSET();
+ public static final int H5T_NORM_NONE = H5T_NORM_NONE();
+ public static final int H5T_NPAD = H5T_NPAD();
+ public static final int H5T_NSGN = H5T_NSGN();
+ public static final int H5T_OPAQUE = H5T_OPAQUE();
+ public static final int H5T_OPAQUE_TAG_MAX = H5T_OPAQUE_TAG_MAX(); /* 1.6.5 */
+ public static final int H5T_ORDER_BE = H5T_ORDER_BE();
+ public static final int H5T_ORDER_ERROR = H5T_ORDER_ERROR();
+ public static final int H5T_ORDER_LE = H5T_ORDER_LE();
+ public static final int H5T_ORDER_NONE = H5T_ORDER_NONE();
+ public static final int H5T_ORDER_VAX = H5T_ORDER_VAX();
+ public static final int H5T_PAD_BACKGROUND = H5T_PAD_BACKGROUND();
+ public static final int H5T_PAD_ERROR = H5T_PAD_ERROR();
+ public static final int H5T_PAD_ONE = H5T_PAD_ONE();
+ public static final int H5T_PAD_ZERO = H5T_PAD_ZERO();
+ public static final int H5T_PERS_DONTCARE = H5T_PERS_DONTCARE();
+ public static final int H5T_PERS_HARD = H5T_PERS_HARD();
+ public static final int H5T_PERS_SOFT = H5T_PERS_SOFT();
+ public static final int H5T_REFERENCE = H5T_REFERENCE();
+ public static final int H5T_SGN_2 = H5T_SGN_2();
+ public static final int H5T_SGN_ERROR = H5T_SGN_ERROR();
+ public static final int H5T_SGN_NONE = H5T_SGN_NONE();
+ public static final long H5T_STD_B16BE = H5T_STD_B16BE();
+ public static final long H5T_STD_B16LE = H5T_STD_B16LE();
+ public static final long H5T_STD_B32BE = H5T_STD_B32BE();
+ public static final long H5T_STD_B32LE = H5T_STD_B32LE();
+ public static final long H5T_STD_B64BE = H5T_STD_B64BE();
+ public static final long H5T_STD_B64LE = H5T_STD_B64LE();
+ public static final long H5T_STD_B8BE = H5T_STD_B8BE();
+ public static final long H5T_STD_B8LE = H5T_STD_B8LE();
+ public static final long H5T_STD_I16BE = H5T_STD_I16BE();
+ public static final long H5T_STD_I16LE = H5T_STD_I16LE();
+ public static final long H5T_STD_I32BE = H5T_STD_I32BE();
+ public static final long H5T_STD_I32LE = H5T_STD_I32LE();
+ public static final long H5T_STD_I64BE = H5T_STD_I64BE();
+ public static final long H5T_STD_I64LE = H5T_STD_I64LE();
+ public static final long H5T_STD_I8BE = H5T_STD_I8BE();
+ public static final long H5T_STD_I8LE = H5T_STD_I8LE();
+ public static final long H5T_STD_REF_DSETREG = H5T_STD_REF_DSETREG();
+ public static final long H5T_STD_REF_OBJ = H5T_STD_REF_OBJ();
+ public static final long H5T_STD_U16BE = H5T_STD_U16BE();
+ public static final long H5T_STD_U16LE = H5T_STD_U16LE();
+ public static final long H5T_STD_U32BE = H5T_STD_U32BE();
+ public static final long H5T_STD_U32LE = H5T_STD_U32LE();
+ public static final long H5T_STD_U64BE = H5T_STD_U64BE();
+ public static final long H5T_STD_U64LE = H5T_STD_U64LE();
+ public static final long H5T_STD_U8BE = H5T_STD_U8BE();
+ public static final long H5T_STD_U8LE = H5T_STD_U8LE();
+ public static final int H5T_STR_ERROR = H5T_STR_ERROR();
+ public static final int H5T_STR_NULLPAD = H5T_STR_NULLPAD();
+ public static final int H5T_STR_NULLTERM = H5T_STR_NULLTERM();
+ public static final int H5T_STR_RESERVED_10 = H5T_STR_RESERVED_10();
+ public static final int H5T_STR_RESERVED_11 = H5T_STR_RESERVED_11();
+ public static final int H5T_STR_RESERVED_12 = H5T_STR_RESERVED_12();
+ public static final int H5T_STR_RESERVED_13 = H5T_STR_RESERVED_13();
+ public static final int H5T_STR_RESERVED_14 = H5T_STR_RESERVED_14();
+ public static final int H5T_STR_RESERVED_15 = H5T_STR_RESERVED_15();
+ public static final int H5T_STR_RESERVED_3 = H5T_STR_RESERVED_3();
+ public static final int H5T_STR_RESERVED_4 = H5T_STR_RESERVED_4();
+ public static final int H5T_STR_RESERVED_5 = H5T_STR_RESERVED_5();
+ public static final int H5T_STR_RESERVED_6 = H5T_STR_RESERVED_6();
+ public static final int H5T_STR_RESERVED_7 = H5T_STR_RESERVED_7();
+ public static final int H5T_STR_RESERVED_8 = H5T_STR_RESERVED_8();
+ public static final int H5T_STR_RESERVED_9 = H5T_STR_RESERVED_9();
+ public static final int H5T_STR_SPACEPAD = H5T_STR_SPACEPAD();
+ public static final int H5T_STRING = H5T_STRING();
+ public static final int H5T_TIME = H5T_TIME();
+ public static final long H5T_UNIX_D32BE = H5T_UNIX_D32BE();
+ public static final long H5T_UNIX_D32LE = H5T_UNIX_D32LE();
+ public static final long H5T_UNIX_D64BE = H5T_UNIX_D64BE();
+ public static final long H5T_UNIX_D64LE = H5T_UNIX_D64LE();
+ public static final long H5T_VARIABLE = H5T_VARIABLE();
+ public static final int H5T_VLEN = H5T_VLEN();
+ public static final int H5Z_CB_CONT = H5Z_CB_CONT();
+ public static final int H5Z_CB_ERROR = H5Z_CB_ERROR();
+ public static final int H5Z_CB_FAIL = H5Z_CB_FAIL();
+ public static final int H5Z_CB_NO = H5Z_CB_NO();
+ public static final int H5Z_DISABLE_EDC = H5Z_DISABLE_EDC();
+ public static final int H5Z_ENABLE_EDC = H5Z_ENABLE_EDC();
+ public static final int H5Z_ERROR_EDC = H5Z_ERROR_EDC();
+ public static final int H5Z_FILTER_DEFLATE = H5Z_FILTER_DEFLATE();
+ public static final int H5Z_FILTER_ERROR = H5Z_FILTER_ERROR();
+ public static final int H5Z_FILTER_FLETCHER32 = H5Z_FILTER_FLETCHER32();
+ public static final int H5Z_FILTER_MAX = H5Z_FILTER_MAX();
+ public static final int H5Z_FILTER_NBIT = H5Z_FILTER_NBIT();
+ public static final int H5Z_FILTER_NONE = H5Z_FILTER_NONE();
+ public static final int H5Z_FILTER_RESERVED = H5Z_FILTER_RESERVED();
+ public static final int H5Z_FILTER_SCALEOFFSET = H5Z_FILTER_SCALEOFFSET();
+ public static final int H5Z_FILTER_SHUFFLE = H5Z_FILTER_SHUFFLE();
+ public static final int H5Z_FILTER_SZIP = H5Z_FILTER_SZIP();
+ public static final int H5Z_FLAG_DEFMASK = H5Z_FLAG_DEFMASK();
+ public static final int H5Z_FLAG_INVMASK = H5Z_FLAG_INVMASK();
+ public static final int H5Z_FLAG_MANDATORY = H5Z_FLAG_MANDATORY();
+ public static final int H5Z_FLAG_OPTIONAL = H5Z_FLAG_OPTIONAL();
+ public static final int H5Z_FLAG_REVERSE = H5Z_FLAG_REVERSE();
+ public static final int H5Z_FLAG_SKIP_EDC = H5Z_FLAG_SKIP_EDC();
+ public static final int H5Z_MAX_NFILTERS = H5Z_MAX_NFILTERS();
+ public static final int H5Z_NO_EDC = H5Z_NO_EDC();
+ public static final int H5Z_FILTER_CONFIG_ENCODE_ENABLED = H5Z_FILTER_CONFIG_ENCODE_ENABLED();
+ public static final int H5Z_FILTER_CONFIG_DECODE_ENABLED = H5Z_FILTER_CONFIG_DECODE_ENABLED();
+ public static final int H5Z_SO_INT_MINBITS_DEFAULT = H5Z_SO_INT_MINBITS_DEFAULT();
+ public static final int H5Z_SO_FLOAT_DSCALE = H5Z_SO_FLOAT_DSCALE();
+ public static final int H5Z_SO_FLOAT_ESCALE = H5Z_SO_FLOAT_ESCALE();
+ public static final int H5Z_SO_INT = H5Z_SO_INT();
+ public static final int H5Z_SHUFFLE_USER_NPARMS = H5Z_SHUFFLE_USER_NPARMS();
+ public static final int H5Z_SHUFFLE_TOTAL_NPARMS = H5Z_SHUFFLE_TOTAL_NPARMS();
+ public static final int H5Z_SZIP_USER_NPARMS = H5Z_SZIP_USER_NPARMS();
+ public static final int H5Z_SZIP_TOTAL_NPARMS = H5Z_SZIP_TOTAL_NPARMS();
+ public static final int H5Z_SZIP_PARM_MASK = H5Z_SZIP_PARM_MASK();
+ public static final int H5Z_SZIP_PARM_PPB = H5Z_SZIP_PARM_PPB();
+ public static final int H5Z_SZIP_PARM_BPP = H5Z_SZIP_PARM_BPP();
+ public static final int H5Z_SZIP_PARM_PPS = H5Z_SZIP_PARM_PPS();
+ public static final int H5Z_NBIT_USER_NPARMS = H5Z_NBIT_USER_NPARMS();
+ public static final int H5Z_SCALEOFFSET_USER_NPARMS = H5Z_SCALEOFFSET_USER_NPARMS();
+ public static final int H5Z_FILTER_ALL = H5Z_FILTER_ALL();
+
+ // /////////////////////////////////////////////////////////////////////////
+    // List of private native methods to get constant values from C          //
+    // DO NOT EDIT THE LIST UNLESS YOU KNOW WHAT YOU ARE DOING!!!             //
+ // /////////////////////////////////////////////////////////////////////////
+
+ private static native final long H5_QUARTER_HADDR_MAX();
+
+ private static native final int H5_SZIP_MAX_PIXELS_PER_BLOCK();
+
+ private static native final int H5_SZIP_NN_OPTION_MASK();
+
+ private static native final int H5_SZIP_EC_OPTION_MASK();
+
+ private static native final int H5_SZIP_ALLOW_K13_OPTION_MASK();
+
+ private static native final int H5_SZIP_CHIP_OPTION_MASK();
+
+ private static native final int H5_INDEX_UNKNOWN();
+
+ private static native final int H5_INDEX_NAME();
+
+ private static native final int H5_INDEX_CRT_ORDER();
+
+ private static native final int H5_INDEX_N();
+
+ private static native final int H5_ITER_UNKNOWN();
+
+ private static native final int H5_ITER_INC();
+
+ private static native final int H5_ITER_DEC();
+
+ private static native final int H5_ITER_NATIVE();
+
+ private static native final int H5_ITER_N();
+
+ private static native final int H5AC_CURR_CACHE_CONFIG_VERSION();
+
+ private static native final int H5AC_MAX_TRACE_FILE_NAME_LEN();
+
+ private static native final int H5AC_METADATA_WRITE_STRATEGY_PROCESS_ZERO_ONLY();
+
+ private static native final int H5AC_METADATA_WRITE_STRATEGY_DISTRIBUTED();
+
+ private static native final int H5C_incr_off();
+
+ private static native final int H5C_incr_threshold();
+
+ private static native final int H5C_flash_incr_off();
+
+ private static native final int H5C_flash_incr_add_space();
+
+ private static native final int H5C_decr_off();
+
+ private static native final int H5C_decr_threshold();
+
+ private static native final int H5C_decr_age_out();
+
+ private static native final int H5C_decr_age_out_with_threshold();
+
+ private static native final int H5D_CHUNK_IDX_BTREE();
+
+ private static native final int H5D_ALLOC_TIME_DEFAULT();
+
+ private static native final int H5D_ALLOC_TIME_EARLY();
+
+ private static native final int H5D_ALLOC_TIME_ERROR();
+
+ private static native final int H5D_ALLOC_TIME_INCR();
+
+ private static native final int H5D_ALLOC_TIME_LATE();
+
+ private static native final int H5D_FILL_TIME_ERROR();
+
+ private static native final int H5D_FILL_TIME_ALLOC();
+
+ private static native final int H5D_FILL_TIME_NEVER();
+
+ private static native final int H5D_FILL_TIME_IFSET();
+
+ private static native final int H5D_FILL_VALUE_DEFAULT();
+
+ private static native final int H5D_FILL_VALUE_ERROR();
+
+ private static native final int H5D_FILL_VALUE_UNDEFINED();
+
+ private static native final int H5D_FILL_VALUE_USER_DEFINED();
+
+ private static native final int H5D_LAYOUT_ERROR();
+
+ private static native final int H5D_CHUNKED();
+
+ private static native final int H5D_COMPACT();
+
+ private static native final int H5D_CONTIGUOUS();
+
+ private static native final int H5D_VIRTUAL();
+
+ private static native final int H5D_NLAYOUTS();
+
+ private static native final int H5D_SPACE_STATUS_ALLOCATED();
+
+ private static native final int H5D_SPACE_STATUS_ERROR();
+
+ private static native final int H5D_SPACE_STATUS_NOT_ALLOCATED();
+
+ private static native final int H5D_SPACE_STATUS_PART_ALLOCATED();
+
+ private static native final int H5D_VDS_ERROR();
+
+ private static native final int H5D_VDS_FIRST_MISSING();
+
+ private static native final int H5D_VDS_LAST_AVAILABLE();
+
+ private static native final long H5E_ALIGNMENT();
+
+ private static native final long H5E_ALREADYEXISTS();
+
+ private static native final long H5E_ALREADYINIT();
+
+ private static native final long H5E_ARGS();
+
+ private static native final long H5E_ATOM();
+
+ private static native final long H5E_ATTR();
+
+ private static native final long H5E_BADATOM();
+
+ private static native final long H5E_BADFILE();
+
+ private static native final long H5E_BADGROUP();
+
+ private static native final long H5E_BADMESG();
+
+ private static native final long H5E_BADRANGE();
+
+ private static native final long H5E_BADSELECT();
+
+ private static native final long H5E_BADSIZE();
+
+ private static native final long H5E_BADTYPE();
+
+ private static native final long H5E_BADVALUE();
+
+ private static native final long H5E_BTREE();
+
+ private static native final long H5E_CACHE();
+
+ private static native final long H5E_CALLBACK();
+
+ private static native final long H5E_CANAPPLY();
+
+ // private static native final long H5E_CANTALLOC();
+ private static native final long H5E_CANTCLIP();
+
+ private static native final long H5E_CANTCLOSEFILE();
+
+ private static native final long H5E_CANTCONVERT();
+
+ private static native final long H5E_CANTCOPY();
+
+ private static native final long H5E_CANTCOUNT();
+
+ private static native final long H5E_CANTCREATE();
+
+ private static native final long H5E_CANTDEC();
+
+ private static native final long H5E_CANTDECODE();
+
+ private static native final long H5E_CANTDELETE();
+
+ private static native final long H5E_CANTENCODE();
+
+ private static native final long H5E_CANTFLUSH();
+
+ private static native final long H5E_CANTFREE();
+
+ private static native final long H5E_CANTGET();
+
+ private static native final long H5E_CANTINC();
+
+ private static native final long H5E_CANTINIT();
+
+ private static native final long H5E_CANTINSERT();
+
+ private static native final long H5E_CANTLIST();
+
+ private static native final long H5E_CANTLOAD();
+
+ private static native final long H5E_CANTLOCK();
+
+ private static native final long H5E_CANTNEXT();
+
+ private static native final long H5E_CANTOPENFILE();
+
+ private static native final long H5E_CANTOPENOBJ();
+
+ // private static native final long H5E_CANTRECV();
+ private static native final long H5E_CANTREGISTER();
+
+ private static native final long H5E_CANTRELEASE();
+
+ private static native final long H5E_CANTSELECT();
+
+ private static native final long H5E_CANTSET();
+
+ private static native final long H5E_CANTSPLIT();
+
+ private static native final long H5E_CANTUNLOCK();
+
+ private static native final long H5E_CLOSEERROR();
+
+ private static native final long H5E_COMPLEN();
+
+ private static native final long H5E_DATASET();
+
+ private static native final long H5E_DATASPACE();
+
+ private static native final long H5E_DATATYPE();
+
+ private static native final long H5E_DEFAULT();
+
+ private static native final long H5E_DUPCLASS();
+
+ private static native final long H5E_EFL();
+
+ private static native final long H5E_EXISTS();
+
+ private static native final long H5E_FCNTL();
+
+ private static native final long H5E_FILE();
+
+ private static native final long H5E_FILEEXISTS();
+
+ private static native final long H5E_FILEOPEN();
+
+ private static native final long H5E_FUNC();
+
+ private static native final long H5E_HEAP();
+
+ private static native final long H5E_INTERNAL();
+
+ private static native final long H5E_IO();
+
+ private static native final long H5E_LINK();
+
+ private static native final long H5E_LINKCOUNT();
+
+ private static native final int H5E_MAJOR();
+
+ private static native final int H5E_MINOR();
+
+ private static native final long H5E_MOUNT();
+
+ private static native final long H5E_MPI();
+
+ private static native final long H5E_MPIERRSTR();
+
+ private static native final long H5E_NOFILTER();
+
+ private static native final long H5E_NOIDS();
+
+ private static native final long H5E_NONE_MAJOR();
+
+ private static native final long H5E_NONE_MINOR();
+
+ private static native final long H5E_NOSPACE();
+
+ private static native final long H5E_NOTCACHED();
+
+ private static native final long H5E_NOTFOUND();
+
+ private static native final long H5E_NOTHDF5();
+
+ private static native final long H5E_OHDR();
+
+ private static native final long H5E_OVERFLOW();
+
+ private static native final long H5E_PLINE();
+
+ private static native final long H5E_PLIST();
+
+ private static native final long H5E_PROTECT();
+
+ private static native final long H5E_READERROR();
+
+ private static native final long H5E_REFERENCE();
+
+ private static native final long H5E_RESOURCE();
+
+ private static native final long H5E_RS();
+
+ private static native final long H5E_SEEKERROR();
+
+ private static native final long H5E_SETLOCAL();
+
+ private static native final long H5E_STORAGE();
+
+ private static native final long H5E_SYM();
+
+ private static native final long H5E_TRUNCATED();
+
+ private static native final long H5E_TST();
+
+ private static native final long H5E_UNINITIALIZED();
+
+ private static native final long H5E_UNSUPPORTED();
+
+ private static native final long H5E_VERSION();
+
+ private static native final long H5E_VFL();
+
+ private static native final long H5E_WALK_DOWNWARD();
+
+ private static native final long H5E_WALK_UPWARD();
+
+ private static native final long H5E_WRITEERROR();
+
+ private static native final int H5F_ACC_CREAT();
+
+ private static native final int H5F_ACC_EXCL();
+
+ private static native final int H5F_ACC_RDONLY();
+
+ private static native final int H5F_ACC_RDWR();
+
+ private static native final int H5F_ACC_TRUNC();
+
+ private static native final int H5F_ACC_DEFAULT();
+
+ private static native final int H5F_CLOSE_DEFAULT();
+
+ private static native final int H5F_CLOSE_SEMI();
+
+ private static native final int H5F_CLOSE_STRONG();
+
+ private static native final int H5F_CLOSE_WEAK();
+
+ private static native final int H5F_LIBVER_EARLIEST();
+
+ private static native final int H5F_LIBVER_LATEST();
+
+ private static native final int H5F_OBJ_ALL();
+
+ private static native final int H5F_OBJ_ATTR();
+
+ private static native final int H5F_OBJ_DATASET();
+
+ private static native final int H5F_OBJ_DATATYPE();
+
+ private static native final int H5F_OBJ_FILE();
+
+ private static native final int H5F_OBJ_GROUP();
+
+ private static native final int H5F_OBJ_LOCAL(); /* 1.6.5 */
+
+ private static native final int H5F_SCOPE_DOWN();
+
+ private static native final int H5F_SCOPE_GLOBAL();
+
+ private static native final int H5F_SCOPE_LOCAL();
+
+ private static native final int H5F_UNLIMITED();
+
+ private static native final int H5F_FILE_SPACE_DEFAULT();
+
+ private static native final int H5F_FILE_SPACE_ALL_PERSIST();
+
+ private static native final int H5F_FILE_SPACE_ALL();
+
+ private static native final int H5F_FILE_SPACE_AGGR_VFD();
+
+ private static native final int H5F_FILE_SPACE_VFD();
+
+ private static native final int H5F_FILE_SPACE_NTYPES();
+
+ private static native final long H5FD_CORE();
+
+ private static native final long H5FD_DIRECT();
+
+ private static native final long H5FD_FAMILY();
+
+ private static native final long H5FD_LOG();
+
+ private static native final long H5FD_MPIO();
+
+ private static native final long H5FD_MULTI();
+
+ private static native final long H5FD_SEC2();
+
+ private static native final long H5FD_STDIO();
+
+ private static native final long H5FD_WINDOWS();
+
+ private static native final int H5FD_LOG_LOC_READ();
+
+ private static native final int H5FD_LOG_LOC_WRITE();
+
+ private static native final int H5FD_LOG_LOC_SEEK();
+
+ private static native final int H5FD_LOG_LOC_IO();
+
+ private static native final int H5FD_LOG_FILE_READ();
+
+ private static native final int H5FD_LOG_FILE_WRITE();
+
+ private static native final int H5FD_LOG_FILE_IO();
+
+ private static native final int H5FD_LOG_FLAVOR();
+
+ private static native final int H5FD_LOG_NUM_READ();
+
+ private static native final int H5FD_LOG_NUM_WRITE();
+
+ private static native final int H5FD_LOG_NUM_SEEK();
+
+ private static native final int H5FD_LOG_NUM_TRUNCATE();
+
+ private static native final int H5FD_LOG_NUM_IO();
+
+ private static native final int H5FD_LOG_TIME_OPEN();
+
+ private static native final int H5FD_LOG_TIME_STAT();
+
+ private static native final int H5FD_LOG_TIME_READ();
+
+ private static native final int H5FD_LOG_TIME_WRITE();
+
+ private static native final int H5FD_LOG_TIME_SEEK();
+
+ private static native final int H5FD_LOG_TIME_CLOSE();
+
+ private static native final int H5FD_LOG_TIME_IO();
+
+ private static native final int H5FD_LOG_ALLOC();
+
+ private static native final int H5FD_LOG_ALL();
+
+ private static native final int H5FD_MEM_NOLIST();
+
+ private static native final int H5FD_MEM_DEFAULT();
+
+ private static native final int H5FD_MEM_SUPER();
+
+ private static native final int H5FD_MEM_BTREE();
+
+ private static native final int H5FD_MEM_DRAW();
+
+ private static native final int H5FD_MEM_GHEAP();
+
+ private static native final int H5FD_MEM_LHEAP();
+
+ private static native final int H5FD_MEM_OHDR();
+
+ private static native final int H5FD_MEM_NTYPES();
+
+ private static native final long H5FD_DEFAULT_HADDR_SIZE();
+
+ private static native final long H5FD_MEM_DEFAULT_SIZE();
+
+ private static native final long H5FD_MEM_DEFAULT_SUPER_SIZE();
+
+ private static native final long H5FD_MEM_DEFAULT_BTREE_SIZE();
+
+ private static native final long H5FD_MEM_DEFAULT_DRAW_SIZE();
+
+ private static native final long H5FD_MEM_DEFAULT_GHEAP_SIZE();
+
+ private static native final long H5FD_MEM_DEFAULT_LHEAP_SIZE();
+
+ private static native final long H5FD_MEM_DEFAULT_OHDR_SIZE();
+
+ private static native final int H5G_DATASET();
+
+ private static native final int H5G_GROUP();
+
+ private static native final int H5G_LINK();
+
+ private static native final int H5G_UDLINK();
+
+ private static native final int H5G_LINK_ERROR();
+
+ private static native final int H5G_LINK_HARD();
+
+ private static native final int H5G_LINK_SOFT();
+
+ private static native final int H5G_NLIBTYPES();
+
+ private static native final int H5G_NTYPES();
+
+ private static native final int H5G_NUSERTYPES();
+
+ private static native final int H5G_RESERVED_5();
+
+ private static native final int H5G_RESERVED_6();
+
+ private static native final int H5G_RESERVED_7();
+
+ private static native final int H5G_SAME_LOC();
+
+ private static native final int H5G_STORAGE_TYPE_UNKNOWN();
+
+ private static native final int H5G_STORAGE_TYPE_SYMBOL_TABLE();
+
+ private static native final int H5G_STORAGE_TYPE_COMPACT();
+
+ private static native final int H5G_STORAGE_TYPE_DENSE();
+
+ private static native final int H5G_TYPE();
+
+ private static native final int H5G_UNKNOWN();
+
+ private static native final int H5I_ATTR();
+
+ private static native final int H5I_BADID();
+
+ private static native final int H5I_DATASET();
+
+ private static native final int H5I_DATASPACE();
+
+ private static native final int H5I_DATATYPE();
+
+ private static native final int H5I_ERROR_CLASS();
+
+ private static native final int H5I_ERROR_MSG();
+
+ private static native final int H5I_ERROR_STACK();
+
+ private static native final int H5I_FILE();
+
+ private static native final int H5I_GENPROP_CLS();
+
+ private static native final int H5I_GENPROP_LST();
+
+ private static native final int H5I_GROUP();
+
+ private static native final int H5I_INVALID_HID();
+
+ private static native final int H5I_NTYPES();
+
+ private static native final int H5I_REFERENCE();
+
+ private static native final int H5I_UNINIT();
+
+ private static native final int H5I_VFL();
+
+ private static native final int H5L_TYPE_ERROR();
+
+ private static native final int H5L_TYPE_HARD();
+
+ private static native final int H5L_TYPE_SOFT();
+
+ private static native final int H5L_TYPE_EXTERNAL();
+
+ private static native final int H5L_TYPE_MAX();
+
+ private static native final int H5O_COPY_SHALLOW_HIERARCHY_FLAG();
+
+ private static native final int H5O_COPY_EXPAND_SOFT_LINK_FLAG();
+
+ private static native final int H5O_COPY_EXPAND_EXT_LINK_FLAG();
+
+ private static native final int H5O_COPY_EXPAND_REFERENCE_FLAG();
+
+ private static native final int H5O_COPY_WITHOUT_ATTR_FLAG();
+
+ private static native final int H5O_COPY_PRESERVE_NULL_FLAG();
+
+ private static native final int H5O_SHMESG_NONE_FLAG();
+
+ private static native final int H5O_SHMESG_SDSPACE_FLAG();
+
+ private static native final int H5O_SHMESG_DTYPE_FLAG();
+
+ private static native final int H5O_SHMESG_FILL_FLAG();
+
+ private static native final int H5O_SHMESG_PLINE_FLAG();
+
+ private static native final int H5O_SHMESG_ATTR_FLAG();
+
+ private static native final int H5O_SHMESG_ALL_FLAG();
+
+ private static native final int H5O_TYPE_UNKNOWN();
+
+ private static native final int H5O_TYPE_GROUP();
+
+ private static native final int H5O_TYPE_DATASET();
+
+ private static native final int H5O_TYPE_NAMED_DATATYPE();
+
+ private static native final int H5O_TYPE_NTYPES();
+
+ private static native final long H5P_ROOT();
+
+ private static native final long H5P_OBJECT_CREATE();
+
+ private static native final long H5P_FILE_CREATE();
+
+ private static native final long H5P_FILE_ACCESS();
+
+ private static native final long H5P_DATASET_CREATE();
+
+ private static native final long H5P_DATASET_ACCESS();
+
+ private static native final long H5P_DATASET_XFER();
+
+ private static native final long H5P_FILE_MOUNT();
+
+ private static native final long H5P_GROUP_CREATE();
+
+ private static native final long H5P_GROUP_ACCESS();
+
+ private static native final long H5P_DATATYPE_CREATE();
+
+ private static native final long H5P_DATATYPE_ACCESS();
+
+ private static native final long H5P_STRING_CREATE();
+
+ private static native final long H5P_ATTRIBUTE_CREATE();
+
+ private static native final long H5P_ATTRIBUTE_ACCESS();
+
+ private static native final long H5P_OBJECT_COPY();
+
+ private static native final long H5P_LINK_CREATE();
+
+ private static native final long H5P_LINK_ACCESS();
+
+ private static native final long H5P_FILE_CREATE_DEFAULT();
+
+ private static native final long H5P_FILE_ACCESS_DEFAULT();
+
+ private static native final long H5P_DATASET_CREATE_DEFAULT();
+
+ private static native final long H5P_DATASET_ACCESS_DEFAULT();
+
+ private static native final long H5P_DATASET_XFER_DEFAULT();
+
+ private static native final long H5P_FILE_MOUNT_DEFAULT();
+
+ private static native final long H5P_GROUP_CREATE_DEFAULT();
+
+ private static native final long H5P_GROUP_ACCESS_DEFAULT();
+
+ private static native final long H5P_DATATYPE_CREATE_DEFAULT();
+
+ private static native final long H5P_DATATYPE_ACCESS_DEFAULT();
+
+ private static native final long H5P_ATTRIBUTE_CREATE_DEFAULT();
+
+ private static native final long H5P_ATTRIBUTE_ACCESS_DEFAULT();
+
+ private static native final long H5P_OBJECT_COPY_DEFAULT();
+
+ private static native final long H5P_LINK_CREATE_DEFAULT();
+
+ private static native final long H5P_LINK_ACCESS_DEFAULT();
+
+ private static native final int H5P_CRT_ORDER_TRACKED();
+
+ private static native final int H5P_CRT_ORDER_INDEXED();
+
+ private static native final long H5P_DEFAULT();
+
+ private static native final int H5PL_TYPE_ERROR();
+
+ private static native final int H5PL_TYPE_FILTER();
+
+ private static native final int H5PL_FILTER_PLUGIN();
+
+ private static native final int H5PL_ALL_PLUGIN();
+
+ private static native final int H5R_BADTYPE();
+
+ private static native final int H5R_DATASET_REGION();
+
+ private static native final int H5R_MAXTYPE();
+
+ private static native final int H5R_OBJ_REF_BUF_SIZE();
+
+ private static native final int H5R_OBJECT();
+
+ private static native final int H5S_ALL();
+
+ private static native final int H5S_MAX_RANK();
+
+ private static native final int H5S_NO_CLASS();
+
+ private static native final int H5S_NULL();
+
+ private static native final int H5S_SCALAR();
+
+ private static native final int H5S_SEL_ALL();
+
+ private static native final int H5S_SEL_ERROR();
+
+ private static native final int H5S_SEL_HYPERSLABS();
+
+ private static native final int H5S_SEL_N();
+
+ private static native final int H5S_SEL_NONE();
+
+ private static native final int H5S_SEL_POINTS();
+
+ private static native final int H5S_SELECT_AND();
+
+ private static native final int H5S_SELECT_APPEND();
+
+ private static native final int H5S_SELECT_INVALID();
+
+ private static native final int H5S_SELECT_NOOP();
+
+ private static native final int H5S_SELECT_NOTA();
+
+ private static native final int H5S_SELECT_NOTB();
+
+ private static native final int H5S_SELECT_OR();
+
+ private static native final int H5S_SELECT_PREPEND();
+
+ private static native final int H5S_SELECT_SET();
+
+ private static native final int H5S_SELECT_XOR();
+
+ private static native final int H5S_SIMPLE();
+
+ private static native final int H5S_UNLIMITED();
+
+ private static native final long H5T_ALPHA_B16();
+
+ private static native final long H5T_ALPHA_B32();
+
+ private static native final long H5T_ALPHA_B64();
+
+ private static native final long H5T_ALPHA_B8();
+
+ private static native final long H5T_ALPHA_F32();
+
+ private static native final long H5T_ALPHA_F64();
+
+ private static native final long H5T_ALPHA_I16();
+
+ private static native final long H5T_ALPHA_I32();
+
+ private static native final long H5T_ALPHA_I64();
+
+ private static native final long H5T_ALPHA_I8();
+
+ private static native final long H5T_ALPHA_U16();
+
+ private static native final long H5T_ALPHA_U32();
+
+ private static native final long H5T_ALPHA_U64();
+
+ private static native final long H5T_ALPHA_U8();
+
+ private static native final int H5T_ARRAY();
+
+ private static native final int H5T_BITFIELD();
+
+ private static native final int H5T_BKG_NO();
+
+ private static native final int H5T_BKG_YES();
+
+ private static native final long H5T_C_S1();
+
+ private static native final int H5T_COMPOUND();
+
+ private static native final int H5T_CONV_CONV();
+
+ private static native final int H5T_CONV_FREE();
+
+ private static native final int H5T_CONV_INIT();
+
+ private static native final int H5T_CSET_ERROR();
+
+ private static native final int H5T_CSET_ASCII();
+
+ private static native final int H5T_CSET_UTF8();
+
+ private static native final int H5T_CSET_RESERVED_10();
+
+ private static native final int H5T_CSET_RESERVED_11();
+
+ private static native final int H5T_CSET_RESERVED_12();
+
+ private static native final int H5T_CSET_RESERVED_13();
+
+ private static native final int H5T_CSET_RESERVED_14();
+
+ private static native final int H5T_CSET_RESERVED_15();
+
+ private static native final int H5T_CSET_RESERVED_2();
+
+ private static native final int H5T_CSET_RESERVED_3();
+
+ private static native final int H5T_CSET_RESERVED_4();
+
+ private static native final int H5T_CSET_RESERVED_5();
+
+ private static native final int H5T_CSET_RESERVED_6();
+
+ private static native final int H5T_CSET_RESERVED_7();
+
+ private static native final int H5T_CSET_RESERVED_8();
+
+ private static native final int H5T_CSET_RESERVED_9();
+
+ private static native final int H5T_DIR_ASCEND();
+
+ private static native final int H5T_DIR_DEFAULT();
+
+ private static native final int H5T_DIR_DESCEND();
+
+ private static native final int H5T_ENUM();
+
+ private static native final int H5T_FLOAT();
+
+ private static native final long H5T_FORTRAN_S1();
+
+ private static native final long H5T_IEEE_F32BE();
+
+ private static native final long H5T_IEEE_F32LE();
+
+ private static native final long H5T_IEEE_F64BE();
+
+ private static native final long H5T_IEEE_F64LE();
+
+ private static native final int H5T_INTEGER();
+
+ private static native final long H5T_INTEL_B16();
+
+ private static native final long H5T_INTEL_B32();
+
+ private static native final long H5T_INTEL_B64();
+
+ private static native final long H5T_INTEL_B8();
+
+ private static native final long H5T_INTEL_F32();
+
+ private static native final long H5T_INTEL_F64();
+
+ private static native final long H5T_INTEL_I16();
+
+ private static native final long H5T_INTEL_I32();
+
+ private static native final long H5T_INTEL_I64();
+
+ private static native final long H5T_INTEL_I8();
+
+ private static native final long H5T_INTEL_U16();
+
+ private static native final long H5T_INTEL_U32();
+
+ private static native final long H5T_INTEL_U64();
+
+ private static native final long H5T_INTEL_U8();
+
+ private static native final long H5T_MIPS_B16();
+
+ private static native final long H5T_MIPS_B32();
+
+ private static native final long H5T_MIPS_B64();
+
+ private static native final long H5T_MIPS_B8();
+
+ private static native final long H5T_MIPS_F32();
+
+ private static native final long H5T_MIPS_F64();
+
+ private static native final long H5T_MIPS_I16();
+
+ private static native final long H5T_MIPS_I32();
+
+ private static native final long H5T_MIPS_I64();
+
+ private static native final long H5T_MIPS_I8();
+
+ private static native final long H5T_MIPS_U16();
+
+ private static native final long H5T_MIPS_U32();
+
+ private static native final long H5T_MIPS_U64();
+
+ private static native final long H5T_MIPS_U8();
+
+ private static native final long H5T_NATIVE_B16();
+
+ private static native final long H5T_NATIVE_B32();
+
+ private static native final long H5T_NATIVE_B64();
+
+ private static native final long H5T_NATIVE_B8();
+
+ private static native final long H5T_NATIVE_CHAR();
+
+ private static native final long H5T_NATIVE_DOUBLE();
+
+ private static native final long H5T_NATIVE_FLOAT();
+
+ private static native final long H5T_NATIVE_HADDR();
+
+ private static native final long H5T_NATIVE_HBOOL();
+
+ private static native final long H5T_NATIVE_HERR();
+
+ private static native final long H5T_NATIVE_HSIZE();
+
+ private static native final long H5T_NATIVE_HSSIZE();
+
+ private static native final long H5T_NATIVE_INT();
+
+ private static native final long H5T_NATIVE_INT_FAST16();
+
+ private static native final long H5T_NATIVE_INT_FAST32();
+
+ private static native final long H5T_NATIVE_INT_FAST64();
+
+ private static native final long H5T_NATIVE_INT_FAST8();
+
+ private static native final long H5T_NATIVE_INT_LEAST16();
+
+ private static native final long H5T_NATIVE_INT_LEAST32();
+
+ private static native final long H5T_NATIVE_INT_LEAST64();
+
+ private static native final long H5T_NATIVE_INT_LEAST8();
+
+ private static native final long H5T_NATIVE_INT16();
+
+ private static native final long H5T_NATIVE_INT32();
+
+ private static native final long H5T_NATIVE_INT64();
+
+ private static native final long H5T_NATIVE_INT8();
+
+ private static native final long H5T_NATIVE_LDOUBLE();
+
+ private static native final long H5T_NATIVE_LLONG();
+
+ private static native final long H5T_NATIVE_LONG();
+
+ private static native final long H5T_NATIVE_OPAQUE();
+
+ private static native final long H5T_NATIVE_SCHAR();
+
+ private static native final long H5T_NATIVE_SHORT();
+
+ private static native final long H5T_NATIVE_UCHAR();
+
+ private static native final long H5T_NATIVE_UINT();
+
+ private static native final long H5T_NATIVE_UINT_FAST16();
+
+ private static native final long H5T_NATIVE_UINT_FAST32();
+
+ private static native final long H5T_NATIVE_UINT_FAST64();
+
+ private static native final long H5T_NATIVE_UINT_FAST8();
+
+ private static native final long H5T_NATIVE_UINT_LEAST16();
+
+ private static native final long H5T_NATIVE_UINT_LEAST32();
+
+ private static native final long H5T_NATIVE_UINT_LEAST64();
+
+ private static native final long H5T_NATIVE_UINT_LEAST8();
+
+ private static native final long H5T_NATIVE_UINT16();
+
+ private static native final long H5T_NATIVE_UINT32();
+
+ private static native final long H5T_NATIVE_UINT64();
+
+ private static native final long H5T_NATIVE_UINT8();
+
+ private static native final long H5T_NATIVE_ULLONG();
+
+ private static native final long H5T_NATIVE_ULONG();
+
+ private static native final long H5T_NATIVE_USHORT();
+
+ private static native final int H5T_NCLASSES();
+
+ private static native final int H5T_NO_CLASS();
+
+ private static native final int H5T_NORM_ERROR();
+
+ private static native final int H5T_NORM_IMPLIED();
+
+ private static native final int H5T_NORM_MSBSET();
+
+ private static native final int H5T_NORM_NONE();
+
+ private static native final int H5T_NPAD();
+
+ private static native final int H5T_NSGN();
+
+ private static native final int H5T_OPAQUE();
+
+ private static native final int H5T_OPAQUE_TAG_MAX();
+
+ private static native final int H5T_ORDER_BE();
+
+ private static native final int H5T_ORDER_ERROR();
+
+ private static native final int H5T_ORDER_LE();
+
+ private static native final int H5T_ORDER_NONE();
+
+ private static native final int H5T_ORDER_VAX();
+
+ private static native final int H5T_PAD_BACKGROUND();
+
+ private static native final int H5T_PAD_ERROR();
+
+ private static native final int H5T_PAD_ONE();
+
+ private static native final int H5T_PAD_ZERO();
+
+ private static native final int H5T_PERS_DONTCARE();
+
+ private static native final int H5T_PERS_HARD();
+
+ private static native final int H5T_PERS_SOFT();
+
+ private static native final int H5T_REFERENCE();
+
+ private static native final int H5T_SGN_2();
+
+ private static native final int H5T_SGN_ERROR();
+
+ private static native final int H5T_SGN_NONE();
+
+ private static native final long H5T_STD_B16BE();
+
+ private static native final long H5T_STD_B16LE();
+
+ private static native final long H5T_STD_B32BE();
+
+ private static native final long H5T_STD_B32LE();
+
+ private static native final long H5T_STD_B64BE();
+
+ private static native final long H5T_STD_B64LE();
+
+ private static native final long H5T_STD_B8BE();
+
+ private static native final long H5T_STD_B8LE();
+
+ private static native final long H5T_STD_I16BE();
+
+ private static native final long H5T_STD_I16LE();
+
+ private static native final long H5T_STD_I32BE();
+
+ private static native final long H5T_STD_I32LE();
+
+ private static native final long H5T_STD_I64BE();
+
+ private static native final long H5T_STD_I64LE();
+
+ private static native final long H5T_STD_I8BE();
+
+ private static native final long H5T_STD_I8LE();
+
+ private static native final long H5T_STD_REF_DSETREG();
+
+ private static native final long H5T_STD_REF_OBJ();
+
+ private static native final long H5T_STD_U16BE();
+
+ private static native final long H5T_STD_U16LE();
+
+ private static native final long H5T_STD_U32BE();
+
+ private static native final long H5T_STD_U32LE();
+
+ private static native final long H5T_STD_U64BE();
+
+ private static native final long H5T_STD_U64LE();
+
+ private static native final long H5T_STD_U8BE();
+
+ private static native final long H5T_STD_U8LE();
+
+ private static native final int H5T_STR_ERROR();
+
+ private static native final int H5T_STR_NULLPAD();
+
+ private static native final int H5T_STR_NULLTERM();
+
+ private static native final int H5T_STR_RESERVED_10();
+
+ private static native final int H5T_STR_RESERVED_11();
+
+ private static native final int H5T_STR_RESERVED_12();
+
+ private static native final int H5T_STR_RESERVED_13();
+
+ private static native final int H5T_STR_RESERVED_14();
+
+ private static native final int H5T_STR_RESERVED_15();
+
+ private static native final int H5T_STR_RESERVED_3();
+
+ private static native final int H5T_STR_RESERVED_4();
+
+ private static native final int H5T_STR_RESERVED_5();
+
+ private static native final int H5T_STR_RESERVED_6();
+
+ private static native final int H5T_STR_RESERVED_7();
+
+ private static native final int H5T_STR_RESERVED_8();
+
+ private static native final int H5T_STR_RESERVED_9();
+
+ private static native final int H5T_STR_SPACEPAD();
+
+ private static native final int H5T_STRING();
+
+ private static native final int H5T_TIME();
+
+ private static native final long H5T_UNIX_D32BE();
+
+ private static native final long H5T_UNIX_D32LE();
+
+ private static native final long H5T_UNIX_D64BE();
+
+ private static native final long H5T_UNIX_D64LE();
+
+ private static native final long H5T_VARIABLE();
+
+ private static native final int H5T_VLEN();
+
+ private static native final int H5Z_CB_CONT();
+
+ private static native final int H5Z_CB_ERROR();
+
+ private static native final int H5Z_CB_FAIL();
+
+ private static native final int H5Z_CB_NO();
+
+ private static native final int H5Z_DISABLE_EDC();
+
+ private static native final int H5Z_ENABLE_EDC();
+
+ private static native final int H5Z_ERROR_EDC();
+
+ private static native final int H5Z_FILTER_DEFLATE();
+
+ private static native final int H5Z_FILTER_ERROR();
+
+ private static native final int H5Z_FILTER_FLETCHER32();
+
+ private static native final int H5Z_FILTER_MAX();
+
+ private static native final int H5Z_FILTER_NBIT();
+
+ private static native final int H5Z_FILTER_NONE();
+
+ private static native final int H5Z_FILTER_RESERVED();
+
+ private static native final int H5Z_FILTER_SCALEOFFSET();
+
+ private static native final int H5Z_FILTER_SHUFFLE();
+
+ private static native final int H5Z_FILTER_SZIP();
+
+ private static native final int H5Z_FLAG_DEFMASK();
+
+ private static native final int H5Z_FLAG_INVMASK();
+
+ private static native final int H5Z_FLAG_MANDATORY();
+
+ private static native final int H5Z_FLAG_OPTIONAL();
+
+ private static native final int H5Z_FLAG_REVERSE();
+
+ private static native final int H5Z_FLAG_SKIP_EDC();
+
+ private static native final int H5Z_MAX_NFILTERS();
+
+ private static native final int H5Z_NO_EDC();
+
+ private static native final int H5Z_FILTER_CONFIG_ENCODE_ENABLED();
+
+ private static native final int H5Z_FILTER_CONFIG_DECODE_ENABLED();
+
+ private static native final int H5Z_SO_INT_MINBITS_DEFAULT();
+
+ private static native final int H5Z_SO_FLOAT_DSCALE();
+
+ private static native final int H5Z_SO_FLOAT_ESCALE();
+
+ private static native final int H5Z_SO_INT();
+
+ private static native final int H5Z_SHUFFLE_USER_NPARMS();
+
+ private static native final int H5Z_SHUFFLE_TOTAL_NPARMS();
+
+ private static native final int H5Z_SZIP_USER_NPARMS();
+
+ private static native final int H5Z_SZIP_TOTAL_NPARMS();
+
+ private static native final int H5Z_SZIP_PARM_MASK();
+
+ private static native final int H5Z_SZIP_PARM_PPB();
+
+ private static native final int H5Z_SZIP_PARM_BPP();
+
+ private static native final int H5Z_SZIP_PARM_PPS();
+
+ private static native final int H5Z_NBIT_USER_NPARMS();
+
+ private static native final int H5Z_SCALEOFFSET_USER_NPARMS();
+
+ private static native final int H5Z_FILTER_ALL();
+
+}
diff --git a/java/src/hdf/hdf5lib/HDF5GroupInfo.java b/java/src/hdf/hdf5lib/HDF5GroupInfo.java
new file mode 100644
index 0000000..fa33fec
--- /dev/null
+++ b/java/src/hdf/hdf5lib/HDF5GroupInfo.java
@@ -0,0 +1,171 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+
+package hdf.hdf5lib;
+
+/**
+ * <p>
+ * This class is a container for the information reported about an HDF5 Object
+ * from the H5Gget_obj_info() method.
+ * <p>
+ * The fileno and objno fields contain four values which uniquely identify an
+ * object among those HDF5 files which are open: if all four values are the same
+ * between two objects, then the two objects are the same (provided both files
+ * are still open). The nlink field is the number of hard links to the object or
+ * zero when information is being returned about a symbolic link (symbolic links
+ * do not have hard links but all other objects always have at least one). The
+ * type field contains the type of the object, one of H5G_GROUP, H5G_DATASET, or
+ * H5G_LINK. The mtime field contains the modification time. If information is
+ * being returned about a symbolic link then linklen will be the length of the
+ * link value (the name of the pointed-to object with the null terminator);
+ * otherwise linklen will be zero. Other fields may be added to this structure
+ * in the future.
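+ * <p>
+ * A minimal usage sketch (all values below are placeholders chosen only to
+ * illustrate the setter and accessors, not data read from a real file):
+ *
+ * <pre>
+ * HDF5GroupInfo info = new HDF5GroupInfo();
+ * // fileno, objno, nlink, type (e.g. an H5G_* code), mtime, linklen
+ * info.setGroupInfo(new long[] { 1L, 2L }, new long[] { 3L, 4L }, 1, 0, 0L, 0);
+ * System.out.println(info);
+ * </pre>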
+ */
+
+public class HDF5GroupInfo {
+ long[] fileno;
+ long[] objno;
+ int nlink;
+ int type;
+ long mtime;
+ int linklen;
+
+ public HDF5GroupInfo() {
+ fileno = new long[2];
+ objno = new long[2];
+ nlink = -1;
+ type = -1;
+ mtime = 0;
+ linklen = 0;
+ }
+
+ /**
+ * Sets the HDF5 group information. Used by the JHI5.
+ *
+ * @param fn
+ * File id number
+ * @param on
+ * Object id number
+ * @param nl
+ * Number of links
+ * @param t
+ * Type of the object
+ * @param mt
+ * Modification time
+ * @param len
+ * Length of link
+ **/
+ public void setGroupInfo(long[] fn, long[] on, int nl, int t, long mt,
+ int len) {
+ fileno = fn;
+ objno = on;
+ nlink = nl;
+ type = t;
+ mtime = mt;
+ linklen = len;
+ }
+
+ /** Resets all the group information to defaults. */
+ public void reset() {
+ fileno[0] = 0;
+ fileno[1] = 0;
+ objno[0] = 0;
+ objno[1] = 0;
+ nlink = -1;
+ type = -1;
+ mtime = 0;
+ linklen = 0;
+ }
+
+ /* accessors */
+ public long[] getFileno() {
+ return fileno;
+ }
+
+ public long[] getObjno() {
+ return objno;
+ }
+
+ public int getType() {
+ return type;
+ }
+
+ public int getNlink() {
+ return nlink;
+ }
+
+ public long getMtime() {
+ return mtime;
+ }
+
+ public int getLinklen() {
+ return linklen;
+ }
+
+ /**
+     * Determines whether two HDF5GroupInfo objects describe the same HDF5
+     * object: the fileno and objno fields together hold four values which
+     * uniquely identify an object among the open HDF5 files, and two
+     * instances are equal when all four values match.
+ */
+ @Override
+ public boolean equals(Object obj) {
+ if (!(obj instanceof HDF5GroupInfo)) {
+ return false;
+ }
+
+ HDF5GroupInfo target = (HDF5GroupInfo) obj;
+ if ((fileno[0] == target.fileno[0]) && (fileno[1] == target.fileno[1])
+ && (objno[0] == target.objno[0])
+ && (objno[1] == target.objno[1])) {
+ return true;
+ }
+ else {
+ return false;
+ }
+ }
+
+ /**
+ * Returns the object id.
+ *
+ * @return the object id
+ */
+ public long getOID() {
+ return objno[0];
+ }
+
+ /**
+     * Converts this object to a String representation.
+ *
+ * @return a string representation of this object
+ */
+ @Override
+ public String toString() {
+ String fileStr = "fileno=null";
+ String objStr = "objno=null";
+
+ if (fileno != null) {
+ fileStr = "fileno[0]=" + fileno[0] + ",fileno[1]=" + fileno[1];
+ }
+
+ if (objno != null) {
+ objStr = "objno[0]=" + objno[0] + ",objno[1]=" + objno[1];
+ }
+
+ return getClass().getName() + "[" + fileStr + "," + objStr + ",type="
+ + type + ",nlink=" + nlink + ",mtime=" + mtime + ",linklen="
+ + linklen + "]";
+ }
+
+}
diff --git a/java/src/hdf/hdf5lib/HDFArray.java b/java/src/hdf/hdf5lib/HDFArray.java
new file mode 100644
index 0000000..55c19e4
--- /dev/null
+++ b/java/src/hdf/hdf5lib/HDFArray.java
@@ -0,0 +1,1096 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+
+package hdf.hdf5lib;
+
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * This is a class for handling multidimensional arrays for HDF.
+ * <p>
+ * The purpose is to allow the storage and retrieval of arbitrary array types
+ * containing scientific data.
+ * <p>
+ * The methods support the conversion of an array to and from Java to a
+ * one-dimensional array of bytes suitable for I/O by the C library.
+ * <p>
+ * This class heavily uses the <a
+ * href="./hdf.hdf5lib.HDFNativeData.html">HDFNativeData</a> class to
+ * convert between Java and C representations.
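+ * <p>
+ * A minimal round-trip sketch using the methods declared below (error
+ * handling omitted; the native HDF5 library must already be loaded, since
+ * the byte conversions are performed by native code):
+ *
+ * <pre>
+ * int[][] data = new int[2][3];
+ * HDFArray ha = new HDFArray(data);
+ * byte[] raw = ha.byteify();       // Java array to native-order bytes
+ * Object back = ha.arrayify(raw);  // bytes back into the original shape
+ * </pre>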
+ */
+
+public class HDFArray {
+
+ private Object _theArray = null;
+ private ArrayDescriptor _desc = null;
+ private byte[] _barray = null;
+
+ // public HDFArray() {}
+
+ /**
+ * The input must be a Java Array (possibly multidimensional) of primitive
+ * numbers or sub-classes of Number.
+ * <p>
+ * The input is analysed to determine the number of dimensions and size of
+ * each dimension, as well as the type of the elements.
+ * <p>
+ * The description is saved in private variables, and used to convert data.
+ *
+ * @param anArray
+ * The array object.
+ *
+ * @exception hdf.hdf5lib.exceptions.HDF5Exception
+ * object is not an array.
+ */
+ public HDFArray(Object anArray) throws HDF5Exception {
+
+        if (anArray == null) {
+            HDF5JavaException ex = new HDF5JavaException(
+                    "HDFArray: array is null?: ");
+            throw (ex);
+        }
+ Class tc = anArray.getClass();
+ if (tc.isArray() == false) {
+ /* exception: not an array */
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: not an array?: ");
+ throw (ex);
+ }
+ _theArray = anArray;
+ _desc = new ArrayDescriptor(_theArray);
+
+ /* extra error checking -- probably not needed */
+ if (_desc == null) {
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: internal error: array description failed?: ");
+ throw (ex);
+ }
+ }
+
+ /**
+ * Allocate a one-dimensional array of bytes sufficient to store the array.
+ *
+ * @return A one-D array of bytes, filled with zeroes. The bytes are
+ * sufficient to hold the data of the Array passed to the
+ * constructor.
+ * @exception hdf.hdf5lib.exceptions.HDF5JavaException
+ * Allocation failed.
+ */
+
+ public byte[] emptyBytes() throws HDF5JavaException {
+ byte[] b = null;
+
+ if ((ArrayDescriptor.dims == 1) && (ArrayDescriptor.NT == 'B')) {
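+            /* 1-D byte[] needs no conversion: reuse the caller's array directly */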
+ b = (byte[]) _theArray;
+ }
+ else {
+ b = new byte[ArrayDescriptor.totalSize];
+ }
+ if (b == null) {
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: emptyBytes: allocation failed");
+ throw (ex);
+ }
+ return (b);
+ }
+
+ /**
+ * Given a Java array of numbers, convert it to a one-dimensional array of
+ * bytes in correct native order.
+ *
+ * @return A one-D array of bytes, constructed from the Array passed to the
+ * constructor.
+     * @exception hdf.hdf5lib.exceptions.HDF5JavaException
+     *                the object is not an array, or another internal error occurred.
+ */
+ public byte[] byteify() throws HDF5JavaException {
+
+ if (_barray != null) {
+ return _barray;
+ }
+
+ if (_theArray == null) {
+ /* exception: not an array */
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: byteify not an array?: ");
+ throw (ex);
+ }
+
+ if (ArrayDescriptor.dims == 1) {
+ /* special case */
+ if (ArrayDescriptor.NT == 'B') {
+ /* really special case! */
+ _barray = (byte[]) _theArray;
+ return _barray;
+ }
+ else {
+ try {
+ _barray = new byte[ArrayDescriptor.totalSize];
+
+ byte[] therow;
+ if (ArrayDescriptor.NT == 'I') {
+ therow = HDFNativeData.intToByte(0,
+ ArrayDescriptor.dimlen[1], (int[]) _theArray);
+ }
+ else if (ArrayDescriptor.NT == 'S') {
+ therow = HDFNativeData.shortToByte(0,
+ ArrayDescriptor.dimlen[1], (short[]) _theArray);
+ }
+ else if (ArrayDescriptor.NT == 'F') {
+ therow = HDFNativeData.floatToByte(0,
+ ArrayDescriptor.dimlen[1], (float[]) _theArray);
+ }
+ else if (ArrayDescriptor.NT == 'J') {
+ therow = HDFNativeData.longToByte(0,
+ ArrayDescriptor.dimlen[1], (long[]) _theArray);
+ }
+ else if (ArrayDescriptor.NT == 'D') {
+ therow = HDFNativeData
+ .doubleToByte(0, ArrayDescriptor.dimlen[1],
+ (double[]) _theArray);
+ }
+ else if (ArrayDescriptor.NT == 'L') {
+ if (ArrayDescriptor.className.equals("java.lang.Byte")) {
+ therow = ByteObjToByte((Byte[]) _theArray);
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Integer")) {
+ therow = IntegerToByte((Integer[]) _theArray);
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Short")) {
+ therow = ShortToByte((Short[]) _theArray);
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Float")) {
+ therow = FloatObjToByte((Float[]) _theArray);
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Double")) {
+ therow = DoubleObjToByte((Double[]) _theArray);
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Long")) {
+ therow = LongObjToByte((Long[]) _theArray);
+ }
+ else {
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: unknown type of Object?");
+ throw (ex);
+ }
+ }
+ else {
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: unknown type of data?");
+ throw (ex);
+ }
+ System
+ .arraycopy(
+ therow,
+ 0,
+ _barray,
+ 0,
+ (ArrayDescriptor.dimlen[1] * ArrayDescriptor.NTsize));
+ return _barray;
+ }
+ catch (OutOfMemoryError err) {
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: byteify array too big?");
+ throw (ex);
+ }
+ }
+ }
+
+ try {
+ _barray = new byte[ArrayDescriptor.totalSize];
+ }
+ catch (OutOfMemoryError err) {
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: byteify array too big?");
+ throw (ex);
+ }
+
+ Object oo = _theArray;
+ int n = 0; /* the current byte */
+ int index = 0;
+ int i;
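+        /*
+         * Walk the flattened byte array row by row: for each block of bytes
+         * starting at offset n, locate the corresponding innermost 1-D row of
+         * the Java array (using the cached objs[]/currentindex[] tables) and
+         * convert that row to bytes with a single native call.
+         */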
+ while (n < ArrayDescriptor.totalSize) {
+ oo = ArrayDescriptor.objs[0];
+ index = n / ArrayDescriptor.bytetoindex[0];
+ index %= ArrayDescriptor.dimlen[0];
+ for (i = 0; i < (ArrayDescriptor.dims); i++) {
+ index = n / ArrayDescriptor.bytetoindex[i];
+ index %= ArrayDescriptor.dimlen[i];
+
+ if (index == ArrayDescriptor.currentindex[i]) {
+ /* then use cached copy */
+ oo = ArrayDescriptor.objs[i];
+ }
+ else {
+ /* check range of index */
+ if (index > (ArrayDescriptor.dimlen[i] - 1)) {
+ throw new java.lang.IndexOutOfBoundsException(
+ "HDFArray: byteify index OOB?");
+ }
+ oo = java.lang.reflect.Array.get(oo, index);
+ ArrayDescriptor.currentindex[i] = index;
+ ArrayDescriptor.objs[i] = oo;
+ }
+ }
+
+ /* byte-ify */
+ byte arow[];
+ try {
+ if (ArrayDescriptor.NT == 'J') {
+ arow = HDFNativeData
+ .longToByte(
+ 0,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ (long[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ }
+ else if (ArrayDescriptor.NT == 'I') {
+ arow = HDFNativeData
+ .intToByte(
+ 0,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ (int[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ }
+ else if (ArrayDescriptor.NT == 'S') {
+ arow = HDFNativeData
+ .shortToByte(
+ 0,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ (short[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ }
+ else if (ArrayDescriptor.NT == 'B') {
+ arow = (byte[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1];
+ }
+ else if (ArrayDescriptor.NT == 'F') {
+ /* 32 bit float */
+ arow = HDFNativeData
+ .floatToByte(
+ 0,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ (float[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ }
+ else if (ArrayDescriptor.NT == 'D') {
+ /* 64 bit float */
+ arow = HDFNativeData
+ .doubleToByte(
+ 0,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ (double[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ }
+ else if (ArrayDescriptor.NT == 'L') {
+ if (ArrayDescriptor.className.equals("java.lang.Byte")) {
+ arow = ByteObjToByte((Byte[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Integer")) {
+ arow = IntegerToByte((Integer[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Short")) {
+ arow = ShortToByte((Short[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Float")) {
+ arow = FloatObjToByte((Float[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Double")) {
+ arow = DoubleObjToByte((Double[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ }
+ else if (ArrayDescriptor.className.equals("java.lang.Long")) {
+ arow = LongObjToByte((Long[]) ArrayDescriptor.objs[ArrayDescriptor.dims - 1]);
+ }
+ else {
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: byteify Object type not implemented?");
+ throw (ex);
+ }
+ }
+ else {
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: byteify unknown type not implemented?");
+ throw (ex);
+ }
+ System
+ .arraycopy(
+ arow,
+ 0,
+ _barray,
+ n,
+ (ArrayDescriptor.dimlen[ArrayDescriptor.dims] * ArrayDescriptor.NTsize));
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ }
+ catch (OutOfMemoryError err) {
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: byteify array too big?");
+ throw (ex);
+ }
+ }
+ /* assert: the whole array is completed--currentindex should == len - 1 */
+
+ /* error checks */
+
+ if (n < ArrayDescriptor.totalSize) {
+ throw new java.lang.InternalError(new String(
+ "HDFArray::byteify: Panic didn't complete all input data: n= "
+ + n + " size = " + ArrayDescriptor.totalSize));
+ }
+ for (i = 0; i < ArrayDescriptor.dims; i++) {
+ if (ArrayDescriptor.currentindex[i] != ArrayDescriptor.dimlen[i] - 1) {
+ throw new java.lang.InternalError(new String(
+ "Panic didn't complete all data: currentindex[" + i
+ + "] = " + ArrayDescriptor.currentindex[i]
+ + " (should be "
+ + (ArrayDescriptor.dimlen[i] - 1) + " ?)"));
+ }
+ }
+ return _barray;
+ }
+
+ /**
+ * Given a one-dimensional array of bytes representing numbers, convert it
+ * to a java array of the shape and size passed to the constructor.
+ *
+ * @param bytes
+ * The bytes to construct the Array.
+ * @return An Array (possibly multidimensional) of primitive or number
+ * objects.
+     * @exception hdf.hdf5lib.exceptions.HDF5JavaException
+     *                the object is not an array, or another internal error occurred.
+ */
+ public Object arrayify(byte[] bytes) throws HDF5JavaException {
+
+ if (_theArray == null) {
+ /* exception: not an array */
+ HDF5JavaException ex = new HDF5JavaException(
+ "arrayify: not an array?: ");
+ throw (ex);
+ }
+
+ if (java.lang.reflect.Array.getLength(bytes) != ArrayDescriptor.totalSize) {
+ /* exception: array not right size */
+ HDF5JavaException ex = new HDF5JavaException(
+ "arrayify: array is wrong size?: ");
+ throw (ex);
+ }
+ _barray = bytes; /* hope that the bytes are correct.... */
+ if (ArrayDescriptor.dims == 1) {
+ /* special case */
+ /* 2 data copies here! */
+ try {
+ if (ArrayDescriptor.NT == 'I') {
+ int[] x = HDFNativeData.byteToInt(_barray);
+ System.arraycopy(x, 0, _theArray, 0,
+ ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ }
+ else if (ArrayDescriptor.NT == 'S') {
+ short[] x = HDFNativeData.byteToShort(_barray);
+ System.arraycopy(x, 0, _theArray, 0,
+ ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ }
+ else if (ArrayDescriptor.NT == 'F') {
+ float x[] = HDFNativeData.byteToFloat(_barray);
+ System.arraycopy(x, 0, _theArray, 0,
+ ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ }
+ else if (ArrayDescriptor.NT == 'J') {
+ long x[] = HDFNativeData.byteToLong(_barray);
+ System.arraycopy(x, 0, _theArray, 0,
+ ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ }
+ else if (ArrayDescriptor.NT == 'D') {
+ double x[] = HDFNativeData.byteToDouble(_barray);
+ System.arraycopy(x, 0, _theArray, 0,
+ ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ }
+ else if (ArrayDescriptor.NT == 'B') {
+ System.arraycopy(_barray, 0, _theArray, 0,
+ ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ }
+ else if (ArrayDescriptor.NT == 'L') {
+ if (ArrayDescriptor.className.equals("java.lang.Byte")) {
+ Byte I[] = ByteToByteObj(_barray);
+ System.arraycopy(I, 0, _theArray, 0,
+ ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Integer")) {
+ Integer I[] = ByteToInteger(_barray);
+ System.arraycopy(I, 0, _theArray, 0,
+ ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Short")) {
+ Short I[] = ByteToShort(_barray);
+ System.arraycopy(I, 0, _theArray, 0,
+ ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Float")) {
+ Float I[] = ByteToFloatObj(_barray);
+ System.arraycopy(I, 0, _theArray, 0,
+ ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Double")) {
+ Double I[] = ByteToDoubleObj(_barray);
+ System.arraycopy(I, 0, _theArray, 0,
+ ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ }
+ else if (ArrayDescriptor.className.equals("java.lang.Long")) {
+ Long I[] = ByteToLongObj(_barray);
+ System.arraycopy(I, 0, _theArray, 0,
+ ArrayDescriptor.dimlen[1]);
+ return _theArray;
+ }
+ else {
+ HDF5JavaException ex = new HDF5JavaException(
+ "arrayify: Object type not implemented yet...");
+ throw (ex);
+ }
+ }
+ else {
+ HDF5JavaException ex = new HDF5JavaException(
+ "arrayify: unknown type not implemented yet...");
+ throw (ex);
+ }
+ }
+ catch (OutOfMemoryError err) {
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: arrayify array too big?");
+ throw (ex);
+ }
+ }
+ /* Assert dims >= 2 */
+
+ Object oo = _theArray;
+ int n = 0; /* the current byte */
+ int index = 0;
+ int i;
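+        /*
+         * Reverse of byteify: for each block of bytes starting at offset n,
+         * locate the matching innermost 1-D row of the Java array via the
+         * cached descriptor tables, convert the bytes to that row's element
+         * type, and store the row back into the array.
+         */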
+ while (n < ArrayDescriptor.totalSize) {
+ oo = ArrayDescriptor.objs[0];
+ index = n / ArrayDescriptor.bytetoindex[0];
+ index %= ArrayDescriptor.dimlen[0];
+ for (i = 0; i < (ArrayDescriptor.dims); i++) {
+ index = n / ArrayDescriptor.bytetoindex[i];
+ index %= ArrayDescriptor.dimlen[i];
+
+ if (index == ArrayDescriptor.currentindex[i]) {
+ /* then use cached copy */
+ oo = ArrayDescriptor.objs[i];
+ }
+ else {
+ /* check range of index */
+ if (index > (ArrayDescriptor.dimlen[i] - 1)) {
+                        throw new java.lang.IndexOutOfBoundsException(
+                                "HDFArray: arrayify index OOB?");
+ }
+ oo = java.lang.reflect.Array.get(oo, index);
+ ArrayDescriptor.currentindex[i] = index;
+ ArrayDescriptor.objs[i] = oo;
+ }
+ }
+
+ /* array-ify */
+ try {
+ if (ArrayDescriptor.NT == 'J') {
+ long[] arow = HDFNativeData.byteToLong(n,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ _barray);
+ java.lang.reflect.Array
+ .set(
+ ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ arow);
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ }
+ else if (ArrayDescriptor.NT == 'I') {
+ int[] arow = HDFNativeData.byteToInt(n,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ _barray);
+ java.lang.reflect.Array
+ .set(
+ ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ arow);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ }
+ else if (ArrayDescriptor.NT == 'S') {
+ short[] arow = HDFNativeData.byteToShort(n,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ _barray);
+ java.lang.reflect.Array
+ .set(
+ ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ arow);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ }
+ else if (ArrayDescriptor.NT == 'B') {
+ System.arraycopy(_barray, n,
+ ArrayDescriptor.objs[ArrayDescriptor.dims - 1], 0,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims]);
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ }
+ else if (ArrayDescriptor.NT == 'F') {
+ float arow[] = HDFNativeData.byteToFloat(n,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ _barray);
+ java.lang.reflect.Array
+ .set(
+ ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ arow);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ }
+ else if (ArrayDescriptor.NT == 'D') {
+ double[] arow = HDFNativeData.byteToDouble(n,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ _barray);
+ java.lang.reflect.Array
+ .set(
+ ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ arow);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ }
+ else if (ArrayDescriptor.NT == 'L') {
+ if (ArrayDescriptor.className.equals("java.lang.Byte")) {
+ Byte I[] = ByteToByteObj(n,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ _barray);
+ java.lang.reflect.Array
+ .set(
+ ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ I);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Integer")) {
+ Integer I[] = ByteToInteger(n,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ _barray);
+ java.lang.reflect.Array
+ .set(
+ ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ I);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Short")) {
+ Short I[] = ByteToShort(n,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ _barray);
+ java.lang.reflect.Array
+ .set(
+ ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ I);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Float")) {
+ Float I[] = ByteToFloatObj(n,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ _barray);
+ java.lang.reflect.Array
+ .set(
+ ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ I);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ }
+ else if (ArrayDescriptor.className
+ .equals("java.lang.Double")) {
+ Double I[] = ByteToDoubleObj(n,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ _barray);
+ java.lang.reflect.Array
+ .set(
+ ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ I);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ }
+ else if (ArrayDescriptor.className.equals("java.lang.Long")) {
+ Long I[] = ByteToLongObj(n,
+ ArrayDescriptor.dimlen[ArrayDescriptor.dims],
+ _barray);
+ java.lang.reflect.Array
+ .set(
+ ArrayDescriptor.objs[ArrayDescriptor.dims - 2],
+ (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]),
+ I);
+
+ n += ArrayDescriptor.bytetoindex[ArrayDescriptor.dims - 1];
+ ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1]++;
+ }
+ else {
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: unsupported Object type: "
+ + ArrayDescriptor.NT);
+ throw (ex);
+ }
+ }
+ else {
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: unknown or unsupported type: "
+ + ArrayDescriptor.NT);
+ throw (ex);
+ }
+ }
+ catch (OutOfMemoryError err) {
+ HDF5JavaException ex = new HDF5JavaException(
+ "HDFArray: arrayify array too big?");
+ throw (ex);
+ }
+
+ }
+
+ /* assert: the whole array is completed--currentindex should == len - 1 */
+
+ /* error checks */
+
+ if (n < ArrayDescriptor.totalSize) {
+ throw new java.lang.InternalError(new String(
+ "HDFArray::arrayify Panic didn't complete all input data: n= "
+ + n + " size = " + ArrayDescriptor.totalSize));
+ }
+ for (i = 0; i <= ArrayDescriptor.dims - 2; i++) {
+ if (ArrayDescriptor.currentindex[i] != ArrayDescriptor.dimlen[i] - 1) {
+ throw new java.lang.InternalError(new String(
+ "HDFArray::arrayify Panic didn't complete all data: currentindex["
+ + i + "] = " + ArrayDescriptor.currentindex[i]
+ + " (should be "
+ + (ArrayDescriptor.dimlen[i] - 1) + "?"));
+ }
+ }
+ if (ArrayDescriptor.NT != 'B') {
+ if (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1] != ArrayDescriptor.dimlen[ArrayDescriptor.dims - 1]) {
+ throw new java.lang.InternalError(new String(
+ "HDFArray::arrayify Panic didn't complete all data: currentindex["
+ + i + "] = " + ArrayDescriptor.currentindex[i]
+ + " (should be " + (ArrayDescriptor.dimlen[i])
+ + "?"));
+ }
+ }
+ else {
+ if (ArrayDescriptor.currentindex[ArrayDescriptor.dims - 1] != (ArrayDescriptor.dimlen[ArrayDescriptor.dims - 1] - 1)) {
+ throw new java.lang.InternalError(new String(
+ "HDFArray::arrayify Panic didn't complete all data: currentindex["
+ + i + "] = " + ArrayDescriptor.currentindex[i]
+ + " (should be "
+ + (ArrayDescriptor.dimlen[i] - 1) + "?"));
+ }
+ }
+
+ return _theArray;
+ }
+
+ private byte[] IntegerToByte(Integer in[]) {
+ int nelems = java.lang.reflect.Array.getLength(in);
+ int[] out = new int[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = in[i].intValue();
+ }
+ return HDFNativeData.intToByte(0, nelems, out);
+ }
+
+ private Integer[] ByteToInteger(byte[] bin) {
+ int in[] = HDFNativeData.byteToInt(bin);
+ int nelems = java.lang.reflect.Array.getLength(in);
+ Integer[] out = new Integer[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Integer(in[i]);
+ }
+ return out;
+ }
+
+ private Integer[] ByteToInteger(int start, int len, byte[] bin) {
+ int in[] = HDFNativeData.byteToInt(start, len, bin);
+ int nelems = java.lang.reflect.Array.getLength(in);
+ Integer[] out = new Integer[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Integer(in[i]);
+ }
+ return out;
+ }
+
+ private byte[] ShortToByte(Short in[]) {
+ int nelems = java.lang.reflect.Array.getLength(in);
+ short[] out = new short[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = in[i].shortValue();
+ }
+ return HDFNativeData.shortToByte(0, nelems, out);
+ }
+
+ private Short[] ByteToShort(byte[] bin) {
+ short in[] = HDFNativeData.byteToShort(bin);
+ int nelems = java.lang.reflect.Array.getLength((Object) in);
+ Short[] out = new Short[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Short(in[i]);
+ }
+ return out;
+ }
+
+ private Short[] ByteToShort(int start, int len, byte[] bin) {
+ short in[] = (short[]) HDFNativeData.byteToShort(start, len, bin);
+ int nelems = java.lang.reflect.Array.getLength((Object) in);
+ Short[] out = new Short[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Short(in[i]);
+ }
+ return out;
+ }
+
+ private byte[] ByteObjToByte(Byte in[]) {
+ int nelems = java.lang.reflect.Array.getLength((Object) in);
+ byte[] out = new byte[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = in[i].byteValue();
+ }
+ return out;
+ }
+
+ private Byte[] ByteToByteObj(byte[] bin) {
+ int nelems = java.lang.reflect.Array.getLength((Object) bin);
+ Byte[] out = new Byte[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Byte(bin[i]);
+ }
+ return out;
+ }
+
+ private Byte[] ByteToByteObj(int start, int len, byte[] bin) {
+ Byte[] out = new Byte[len];
+
+ for (int i = 0; i < len; i++) {
+ out[i] = new Byte(bin[i]);
+ }
+ return out;
+ }
+
+ private byte[] FloatObjToByte(Float in[]) {
+ int nelems = java.lang.reflect.Array.getLength((Object) in);
+ float[] out = new float[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = in[i].floatValue();
+ }
+ return HDFNativeData.floatToByte(0, nelems, out);
+ }
+
+ private Float[] ByteToFloatObj(byte[] bin) {
+ float in[] = (float[]) HDFNativeData.byteToFloat(bin);
+ int nelems = java.lang.reflect.Array.getLength((Object) in);
+ Float[] out = new Float[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Float(in[i]);
+ }
+ return out;
+ }
+
+ private Float[] ByteToFloatObj(int start, int len, byte[] bin) {
+ float in[] = (float[]) HDFNativeData.byteToFloat(start, len, bin);
+ int nelems = java.lang.reflect.Array.getLength((Object) in);
+ Float[] out = new Float[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Float(in[i]);
+ }
+ return out;
+ }
+
+ private byte[] DoubleObjToByte(Double in[]) {
+ int nelems = java.lang.reflect.Array.getLength((Object) in);
+ double[] out = new double[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = in[i].doubleValue();
+ }
+ return HDFNativeData.doubleToByte(0, nelems, out);
+ }
+
+ private Double[] ByteToDoubleObj(byte[] bin) {
+ double in[] = (double[]) HDFNativeData.byteToDouble(bin);
+ int nelems = java.lang.reflect.Array.getLength((Object) in);
+ Double[] out = new Double[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Double(in[i]);
+ }
+ return out;
+ }
+
+ private Double[] ByteToDoubleObj(int start, int len, byte[] bin) {
+ double in[] = (double[]) HDFNativeData.byteToDouble(start, len, bin);
+ int nelems = java.lang.reflect.Array.getLength((Object) in);
+ Double[] out = new Double[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Double(in[i]);
+ }
+ return out;
+ }
+
+ private byte[] LongObjToByte(Long in[]) {
+ int nelems = java.lang.reflect.Array.getLength((Object) in);
+ long[] out = new long[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = in[i].longValue();
+ }
+ return HDFNativeData.longToByte(0, nelems, out);
+ }
+
+ private Long[] ByteToLongObj(byte[] bin) {
+ long in[] = (long[]) HDFNativeData.byteToLong(bin);
+ int nelems = java.lang.reflect.Array.getLength((Object) in);
+ Long[] out = new Long[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Long(in[i]);
+ }
+ return out;
+ }
+
+ private Long[] ByteToLongObj(int start, int len, byte[] bin) {
+ long in[] = (long[]) HDFNativeData.byteToLong(start, len, bin);
+ int nelems = java.lang.reflect.Array.getLength((Object) in);
+ Long[] out = new Long[nelems];
+
+ for (int i = 0; i < nelems; i++) {
+ out[i] = new Long(in[i]);
+ }
+ return out;
+ }
+}
+
+/**
+ * This private class is used by HDFArray to discover the shape and type of an
+ * arbitrary array.
+ * <p>
+ * We use reflection (java.lang.reflect) here.
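+ * <p>
+ * A small illustration of what the constructor derives, assuming an
+ * <code>int[2][3]</code> input:
+ *
+ * <pre>
+ * // new int[2][3].getClass().toString() is "class [[I":
+ * //   two '[' characters, so dims is 2
+ * //   trailing 'I', so NT is 'I' and NTsize is 4 bytes
+ * </pre>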
+ */
+class ArrayDescriptor {
+
+ static String theType = "";
+ static Class theClass = null;
+ static int[] dimlen = null;
+ static int[] dimstart = null;
+ static int[] currentindex = null;
+ static int[] bytetoindex = null;
+ static int totalSize = 0;
+ static Object[] objs = null;
+    static char NT = ' '; /* must be B,S,I,J,F,D, or L (Number subclass), else error */
+ static int NTsize = 0;
+ static int dims = 0;
+ static String className;
+
+ public ArrayDescriptor(Object anArray) throws HDF5Exception {
+
+ Class tc = anArray.getClass();
+ if (tc.isArray() == false) {
+ /* exception: not an array */
+ HDF5Exception ex = new HDF5JavaException(
+ "ArrayDescriptor: not an array?: ");
+ throw (ex);
+ }
+
+ theClass = tc;
+
+ /*
+ * parse the type descriptor to discover the shape of the array
+ */
+ String ss = tc.toString();
+ theType = ss;
+ int n = 6;
+ dims = 0;
+ char c = ' ';
+ while (n < ss.length()) {
+ c = ss.charAt(n);
+ n++;
+ if (c == '[') {
+ dims++;
+ }
+ }
+
+ String css = ss.substring(ss.lastIndexOf('[') + 1);
+ Class compC = tc.getComponentType();
+ String cs = compC.toString();
+        NT = c; /* must be B,S,I,J,F,D, or L (Number subclass), else error */
+ if (NT == 'B') {
+ NTsize = 1;
+ }
+ else if (NT == 'S') {
+ NTsize = 2;
+ }
+ else if ((NT == 'I') || (NT == 'F')) {
+ NTsize = 4;
+ }
+ else if ((NT == 'J') || (NT == 'D')) {
+ NTsize = 8;
+ }
+ else if (css.startsWith("Ljava.lang.Byte")) {
+ NT = 'L';
+ className = "java.lang.Byte";
+ NTsize = 1;
+ }
+ else if (css.startsWith("Ljava.lang.Short")) {
+ NT = 'L';
+ className = "java.lang.Short";
+ NTsize = 2;
+ }
+ else if (css.startsWith("Ljava.lang.Integer")) {
+ NT = 'L';
+ className = "java.lang.Integer";
+ NTsize = 4;
+ }
+ else if (css.startsWith("Ljava.lang.Float")) {
+ NT = 'L';
+ className = "java.lang.Float";
+ NTsize = 4;
+ }
+ else if (css.startsWith("Ljava.lang.Double")) {
+ NT = 'L';
+ className = "java.lang.Double";
+ NTsize = 8;
+ }
+ else if (css.startsWith("Ljava.lang.Long")) {
+ NT = 'L';
+ className = "java.lang.Long";
+ NTsize = 8;
+ }
+ else if (css.startsWith("Ljava.lang.String")) {
+            throw new HDF5JavaException(new String(
+                    "ArrayDescriptor: Error: String array not supported yet"));
+ }
+ else {
+ /*
+ * exception: not a numeric type
+ */
+            throw new HDF5JavaException(new String(
+                    "ArrayDescriptor: Error: array is not numeric (type is "
+                            + css + ") ?"));
+ }
+
+ /* fill in the table */
+ dimlen = new int[dims + 1];
+ dimstart = new int[dims + 1];
+ currentindex = new int[dims + 1];
+ bytetoindex = new int[dims + 1];
+ objs = new Object[dims + 1];
+
+ Object o = anArray;
+ objs[0] = o;
+ dimlen[0] = 1;
+ dimstart[0] = 0;
+ currentindex[0] = 0;
+ int i;
+ for (i = 1; i <= dims; i++) {
+ dimlen[i] = java.lang.reflect.Array.getLength((Object) o);
+ o = java.lang.reflect.Array.get((Object) o, 0);
+ objs[i] = o;
+ dimstart[i] = 0;
+ currentindex[i] = 0;
+ }
+
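+        /*
+         * bytetoindex[i] is the number of bytes spanned by one step along
+         * dimension i: NTsize times the product of the lengths of all deeper
+         * dimensions. bytetoindex[0] is therefore the total byte count.
+         */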
+ int j;
+ int dd;
+ bytetoindex[dims] = NTsize;
+ for (i = dims; i >= 0; i--) {
+ dd = NTsize;
+ for (j = i; j < dims; j++) {
+ dd *= dimlen[j + 1];
+ }
+ bytetoindex[i] = dd;
+ }
+
+ totalSize = bytetoindex[0];
+ }
+
+ /**
+ * Debug dump
+ */
+ public void dumpInfo() {
+ System.out.println("Type: " + theType);
+ System.out.println("Class: " + theClass);
+ System.out.println("NT: " + NT + " NTsize: " + NTsize);
+ System.out.println("Array has " + dims + " dimensions (" + totalSize
+ + " bytes)");
+ int i;
+ for (i = 0; i <= dims; i++) {
+ Class tc = objs[i].getClass();
+ String ss = tc.toString();
+ System.out.println(i + ": start " + dimstart[i] + ": len "
+ + dimlen[i] + " current " + currentindex[i]
+ + " bytetoindex " + bytetoindex[i] + " object " + objs[i]
+ + " otype " + ss);
+ }
+ }
+}
diff --git a/java/src/hdf/hdf5lib/HDFNativeData.java b/java/src/hdf/hdf5lib/HDFNativeData.java
new file mode 100644
index 0000000..66929fb
--- /dev/null
+++ b/java/src/hdf/hdf5lib/HDFNativeData.java
@@ -0,0 +1,481 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib;
+
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5JavaException;
+
+/**
+ * This class encapsulates native methods to deal with arrays of numbers,
+ * converting from numbers to bytes and bytes to numbers.
+ * <p>
+ * These routines are used by class <b>HDFArray</b> to pass data to and from the
+ * HDF-5 library.
+ * <p>
+ * Methods xxxToByte() convert a Java array of primitive numbers (int, short,
+ * ...) to a Java array of bytes. Methods byteToXxx() convert from a Java array
+ * of bytes into a Java array of primitive numbers (int, short, ...)
+ * <p>
+ * Variant interfaces convert a section of an array, and also can convert to
+ * sub-classes of Java <b>Number</b>.
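+ * <p>
+ * A minimal sketch of the intended use (the native HDF5 library must be
+ * loaded before these native methods can be called):
+ *
+ * <pre>
+ * int[] values = { 1, 2, 3 };
+ * byte[] raw = HDFNativeData.intToByte(0, values.length, values);
+ * int[] back = HDFNativeData.byteToInt(raw);
+ * </pre>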
+ * <P>
+ * <b>See also:</b> hdf.hdf5lib.HDFArray.
+ */
+
+public class HDFNativeData {
+
+ /**
+ * Convert an array of bytes into an array of ints
+ *
+ * @param data
+ * The input array of bytes
+ * @return an array of int
+ */
+ public synchronized static native int[] byteToInt(byte[] data);
+
+ /**
+ * Convert an array of bytes into an array of floats
+ *
+ * @param data
+ * The input array of bytes
+ * @return an array of float
+ */
+ public synchronized static native float[] byteToFloat(byte[] data);
+
+ /**
+ * Convert an array of bytes into an array of shorts
+ *
+ * @param data
+ * The input array of bytes
+ * @return an array of short
+ */
+ public synchronized static native short[] byteToShort(byte[] data);
+
+ /**
+ * Convert an array of bytes into an array of long
+ *
+ * @param data
+ * The input array of bytes
+ * @return an array of long
+ */
+ /*
+ * Note: a C 'long' may be only 32 bits while a Java 'long' is 64 bits;
+ * the native implementation must produce proper 64-bit values for this to work.
+ */
+ public synchronized static native long[] byteToLong(byte[] data);
+
+ /**
+ * Convert an array of bytes into an array of double
+ *
+ * @param data
+ * The input array of bytes
+ * @return an array of double
+ */
+ public synchronized static native double[] byteToDouble(byte[] data);
+
+ /**
+ * Convert a range from an array of bytes into an array of int
+ *
+ * @param start
+ * The position in the input array of bytes to start
+ * @param len
+ * The number of 'int' to convert
+ * @param data
+ * The input array of bytes
+ * @return an array of 'len' int
+ */
+ public synchronized static native int[] byteToInt(int start, int len,
+ byte[] data);
+
+ /**
+ * Convert 4 bytes from an array of bytes into a single int
+ *
+ * @param start
+ * The position in the input array of bytes to start
+ * @param data
+ * The input array of bytes
+ * @return The integer value of the bytes.
+ */
+ public synchronized static int byteToInt(byte[] data, int start) {
+ int[] ival = new int[1];
+ ival = byteToInt(start, 1, data);
+ return (ival[0]);
+ }
+
+ /**
+ * Convert a range from an array of bytes into an array of short
+ *
+ * @param start
+ * The position in the input array of bytes to start
+ * @param len
+ * The number of 'short' to convert
+ * @param data
+ * The input array of bytes
+ * @return an array of 'len' short
+ */
+ public synchronized static native short[] byteToShort(int start, int len,
+ byte[] data);
+
+ /**
+ * Convert 2 bytes from an array of bytes into a single short
+ *
+ * @param start
+ * The position in the input array of bytes to start
+ * @param data
+ * The input array of bytes
+ * @return The short value of the bytes.
+ */
+ public synchronized static short byteToShort(byte[] data, int start) {
+ short[] sval = new short[1];
+ sval = byteToShort(start, 1, data);
+ return (sval[0]);
+ }
+
+ /**
+ * Convert a range from an array of bytes into an array of float
+ *
+ * @param start
+ * The position in the input array of bytes to start
+ * @param len
+ * The number of 'float' to convert
+ * @param data
+ * The input array of bytes
+ * @return an array of 'len' float
+ */
+ public synchronized static native float[] byteToFloat(int start, int len,
+ byte[] data);
+
+ /**
+ * Convert 4 bytes from an array of bytes into a single float
+ *
+ * @param start
+ * The position in the input array of bytes to start
+ * @param data
+ * The input array of bytes
+ * @return The float value of the bytes.
+ */
+ public synchronized static float byteToFloat(byte[] data, int start) {
+ float[] fval = new float[1];
+ fval = byteToFloat(start, 1, data);
+ return (fval[0]);
+ }
+
+ /**
+ * Convert a range from an array of bytes into an array of long
+ *
+ * @param start
+ * The position in the input array of bytes to start
+ * @param len
+ * The number of 'long' to convert
+ * @param data
+ * The input array of bytes
+ * @return an array of 'len' long
+ */
+ public synchronized static native long[] byteToLong(int start, int len,
+ byte[] data);
+
+ /**
+ * Convert 8 bytes from an array of bytes into a single long
+ *
+ * @param start
+ * The position in the input array of bytes to start
+ * @param data
+ * The input array of bytes
+ * @return The long value of the bytes.
+ */
+ public synchronized static long byteToLong(byte[] data, int start) {
+ long[] lval = new long[1];
+ lval = byteToLong(start, 1, data);
+ return (lval[0]);
+ }
+
+ /**
+ * Convert a range from an array of bytes into an array of double
+ *
+ * @param start
+ * The position in the input array of bytes to start
+ * @param len
+ * The number of 'double' to convert
+ * @param data
+ * The input array of bytes
+ * @return an array of 'len' double
+ */
+ public synchronized static native double[] byteToDouble(int start, int len,
+ byte[] data);
+
+ /**
+ * Convert 8 bytes from an array of bytes into a single double
+ *
+ * @param start
+ * The position in the input array of bytes to start
+ * @param data
+ * The input array of bytes
+ * @return The double value of the bytes.
+ */
+ public synchronized static double byteToDouble(byte[] data, int start) {
+ double[] dval = new double[1];
+ dval = byteToDouble(start, 1, data);
+ return (dval[0]);
+ }
+
+ /**
+ * Convert a range from an array of int into an array of bytes.
+ *
+ * @param start
+ * The position in the input array of int to start
+ * @param len
+ * The number of 'int' to convert
+ * @param data
+ * The input array of int
+ * @return an array of bytes
+ */
+ public synchronized static native byte[] intToByte(int start, int len,
+ int[] data);
+
+ /**
+ * Convert a range from an array of short into an array of bytes.
+ *
+ * @param start
+ * The position in the input array of short to start
+ * @param len
+ * The number of 'short' to convert
+ * @param data
+ * The input array of short
+ * @return an array of bytes
+ */
+ public synchronized static native byte[] shortToByte(int start, int len,
+ short[] data);
+
+ /**
+ * Convert a range from an array of float into an array of bytes.
+ *
+ * @param start
+ * The position in the input array of float to start
+ * @param len
+ * The number of 'float' to convert
+ * @param data
+ * The input array of float
+ * @return an array of bytes
+ */
+ public synchronized static native byte[] floatToByte(int start, int len,
+ float[] data);
+
+ /**
+ * Convert a range from an array of long into an array of bytes.
+ *
+ * @param start
+ * The position in the input array of long to start
+ * @param len
+ * The number of 'long' to convert
+ * @param data
+ * The input array of long
+ * @return an array of bytes
+ */
+ public synchronized static native byte[] longToByte(int start, int len,
+ long[] data);
+
+ /**
+ * Convert a range from an array of double into an array of bytes.
+ *
+ * @param start
+ * The position in the input array of double to start
+ * @param len
+ * The number of 'double' to convert
+ * @param data
+ * The input array of double
+ * @return an array of bytes
+ */
+ public synchronized static native byte[] doubleToByte(int start, int len,
+ double[] data);
+
+ /**
+ * Convert a single byte into an array of one byte.
+ * <p>
+ * (This is a trivial method.)
+ *
+ * @param data
+ * The input byte
+ * @return an array of bytes
+ */
+ public synchronized static native byte[] byteToByte(byte data);
+
+ /**
+ * Convert a single Byte object into an array of one byte.
+ * <p>
+ * (This is an almost trivial method.)
+ *
+ * @param data
+ * The input Byte
+ * @return an array of bytes
+ */
+ public synchronized static byte[] byteToByte(Byte data) {
+ return byteToByte(data.byteValue());
+ }
+
+ /**
+ * Convert a single int into an array of 4 bytes.
+ *
+ * @param data
+ * The input int
+ * @return an array of bytes
+ */
+ public synchronized static native byte[] intToByte(int data);
+
+ /**
+ * Convert a single Integer object into an array of 4 bytes.
+ *
+ * @param data
+ * The input Integer
+ * @return an array of bytes
+ */
+ public synchronized static byte[] intToByte(Integer data) {
+ return intToByte(data.intValue());
+ }
+
+ /**
+ * Convert a single short into an array of 2 bytes.
+ *
+ * @param data
+ * The input short
+ * @return an array of bytes
+ */
+ public synchronized static native byte[] shortToByte(short data);
+
+ /**
+ * Convert a single Short object into an array of 2 bytes.
+ *
+ * @param data
+ * The input Short
+ * @return an array of bytes
+ */
+ public synchronized static byte[] shortToByte(Short data) {
+ return shortToByte(data.shortValue());
+ }
+
+ /**
+ * Convert a single float into an array of 4 bytes.
+ *
+ * @param data
+ * The input float
+ * @return an array of bytes
+ */
+ public synchronized static native byte[] floatToByte(float data);
+
+ /**
+ * Convert a single Float object into an array of 4 bytes.
+ *
+ * @param data
+ * The input Float
+ * @return an array of bytes
+ */
+ public synchronized static byte[] floatToByte(Float data) {
+ return floatToByte(data.floatValue());
+ }
+
+ /**
+ * Convert a single long into an array of 8 bytes.
+ *
+ * @param data
+ * The input long
+ * @return an array of bytes
+ */
+ public synchronized static native byte[] longToByte(long data);
+
+ /**
+ * Convert a single Long object into an array of 8 bytes.
+ *
+ * @param data
+ * The input Long
+ * @return an array of bytes
+ */
+ public synchronized static byte[] longToByte(Long data) {
+ return longToByte(data.longValue());
+ }
+
+ /**
+ * Convert a single double into an array of 8 bytes.
+ *
+ * @param data
+ * The input double
+ * @return an array of bytes
+ */
+ public synchronized static native byte[] doubleToByte(double data);
+
+ /**
+ * Convert a single Double object into an array of 8 bytes.
+ *
+ * @param data
+ * The input Double
+ * @return an array of bytes
+ */
+ public synchronized static byte[] doubleToByte(Double data) {
+ return doubleToByte(data.doubleValue());
+ }
+
+ /**
+ * Create a Number object from an array of bytes.
+ *
+ * @param barray
+ * The bytes to be converted
+ * @param obj
+ * Input object of the desired output class. Must be a sub-class
+ * of Number.
+ * @return An Object of the same class as obj, holding the converted value.
+ *
+ * @exception HDF5Exception
+ * - if the class of obj is not a supported type.
+ */
+ public synchronized static Object byteToNumber(byte[] barray, Object obj)
+ throws HDF5Exception {
+ Class theClass = obj.getClass();
+ String type = theClass.getName();
+ Object retobj = null;
+
+ if (type.equals("java.lang.Integer")) {
+ int[] i = hdf.hdf5lib.HDFNativeData.byteToInt(0, 1, barray);
+ retobj = new Integer(i[0]);
+ }
+ else if (type.equals("java.lang.Byte")) {
+ retobj = new Byte(barray[0]);
+ }
+ else if (type.equals("java.lang.Short")) {
+ short[] f = hdf.hdf5lib.HDFNativeData
+ .byteToShort(0, 1, barray);
+ retobj = new Short(f[0]);
+ }
+ else if (type.equals("java.lang.Float")) {
+ float[] f = hdf.hdf5lib.HDFNativeData
+ .byteToFloat(0, 1, barray);
+ retobj = new Float(f[0]);
+ }
+ else if (type.equals("java.lang.Long")) {
+ long[] f = hdf.hdf5lib.HDFNativeData.byteToLong(0, 1, barray);
+ retobj = new Long(f[0]);
+ }
+ else if (type.equals("java.lang.Double")) {
+ double[] f = hdf.hdf5lib.HDFNativeData.byteToDouble(0, 1,
+ barray);
+ retobj = new Double(f[0]);
+ }
+ else {
+ /* exception: unsupported type */
+ HDF5Exception ex = new HDF5JavaException(
+ "byteToNumber: setfield bad type: " + obj + " " + type);
+ throw (ex);
+ }
+ return (retobj);
+ }
+}
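A quick sketch of how these conversion routines pair up (hypothetical code, not part of the patch; it assumes the HDF5 JNI native library has already been loaded so the native methods can resolve):

    import hdf.hdf5lib.HDFNativeData;

    public class NativeDataRoundTrip {
        public static void main(String[] args) throws Exception {
            int[] values = { 7, 8, 9 };

            // Pack three ints into their native byte representation...
            byte[] packed = HDFNativeData.intToByte(0, values.length, values);

            // ...and unpack them again; restored[i] should equal values[i].
            int[] restored = HDFNativeData.byteToInt(packed);

            // byteToNumber picks the conversion from the class of the template object.
            Integer first = (Integer) HDFNativeData.byteToNumber(packed, Integer.valueOf(0));

            System.out.println(restored[0] + " == " + first);
        }
    }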
diff --git a/java/src/hdf/hdf5lib/callbacks/Callbacks.java b/java/src/hdf/hdf5lib/callbacks/Callbacks.java
new file mode 100644
index 0000000..5757eef
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/Callbacks.java
@@ -0,0 +1,33 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+/** All callback definitions must derive from this interface. Any
+ * derived interfaces must define a single public method named "callback".
+ * You are responsible for deregistering your callback (if necessary)
+ * in its {@link Object#finalize} method. If native code attempts to call
+ * a callback which has been GC'd, you will likely crash the VM. If
+ * there is no method to deregister the callback (e.g. <code>atexit</code>
+ * in the C library), you must ensure that you always keep a live reference
+ * to the callback object.<p>
+ * A callback should generally never throw an exception, since it doesn't
+ * necessarily have an encompassing Java environment to catch it. Any
+ * exceptions thrown will be passed to the default callback exception
+ * handler.
+ */
+public interface Callbacks {
+
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5A_iterate_cb.java b/java/src/hdf/hdf5lib/callbacks/H5A_iterate_cb.java
new file mode 100644
index 0000000..fa390a2
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5A_iterate_cb.java
@@ -0,0 +1,23 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+import hdf.hdf5lib.structs.H5A_info_t;
+
+// Callback interface for H5Aiterate (attribute iteration)
+public interface H5A_iterate_cb extends Callbacks {
+ int callback(long group, String name, H5A_info_t info, H5A_iterate_t op_data);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5A_iterate_t.java b/java/src/hdf/hdf5lib/callbacks/H5A_iterate_t.java
new file mode 100644
index 0000000..4816f8f
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5A_iterate_t.java
@@ -0,0 +1,22 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+public interface H5A_iterate_t {
+/** Any class implementing this interface must define a single public data
+ * member to hold user data, for example: public ArrayList iterdata = new ArrayList();
+ */
+}
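To make the callback contract concrete, here is a hedged sketch of an attribute-iteration pair; the class names are invented, and actually passing the pair to H5.H5Aiterate is assumed rather than shown in this hunk. Per the Callbacks javadoc above, keep live references to both objects for as long as native code may invoke them:

    import java.util.ArrayList;

    import hdf.hdf5lib.callbacks.H5A_iterate_cb;
    import hdf.hdf5lib.callbacks.H5A_iterate_t;
    import hdf.hdf5lib.structs.H5A_info_t;

    // User-data holder: the single public data member the H5A_iterate_t javadoc asks for.
    class AttrIterData implements H5A_iterate_t {
        public ArrayList<String> iterdata = new ArrayList<>();
    }

    // Records each visited attribute name into the user data.
    class AttrNameCollector implements H5A_iterate_cb {
        public int callback(long group, String name, H5A_info_t info, H5A_iterate_t op_data) {
            ((AttrIterData) op_data).iterdata.add(name);
            return 0; // 0 asks the library to continue the iteration
        }
    }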
diff --git a/java/src/hdf/hdf5lib/callbacks/H5D_iterate_cb.java b/java/src/hdf/hdf5lib/callbacks/H5D_iterate_cb.java
new file mode 100644
index 0000000..bac3dd9
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5D_iterate_cb.java
@@ -0,0 +1,21 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+// Callback interface for H5Diterate (dataset element iteration)
+public interface H5D_iterate_cb extends Callbacks {
+ int callback(byte[] elem, long elem_type, int ndim, long[] point, H5D_iterate_t op_data);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5D_iterate_t.java b/java/src/hdf/hdf5lib/callbacks/H5D_iterate_t.java
new file mode 100644
index 0000000..03bcc20
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5D_iterate_t.java
@@ -0,0 +1,22 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+public interface H5D_iterate_t {
+/** Any class implementing this interface must define a single public data
+ * member to hold user data, for example: public ArrayList iterdata = new ArrayList();
+ */
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5E_walk_cb.java b/java/src/hdf/hdf5lib/callbacks/H5E_walk_cb.java
new file mode 100644
index 0000000..672d151
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5E_walk_cb.java
@@ -0,0 +1,23 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+import hdf.hdf5lib.structs.H5E_error2_t;
+
+// Callback interface for H5Ewalk (error stack traversal)
+public interface H5E_walk_cb extends Callbacks {
+ int callback(int nidx, H5E_error2_t info, H5E_walk_t op_data);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5E_walk_t.java b/java/src/hdf/hdf5lib/callbacks/H5E_walk_t.java
new file mode 100644
index 0000000..f3f8a39
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5E_walk_t.java
@@ -0,0 +1,22 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+public interface H5E_walk_t {
+/** Any class implementing this interface must define a single public data
+ * member to hold user data, for example: public ArrayList iterdata = new ArrayList();
+ */
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5L_iterate_cb.java b/java/src/hdf/hdf5lib/callbacks/H5L_iterate_cb.java
new file mode 100644
index 0000000..5218311
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5L_iterate_cb.java
@@ -0,0 +1,23 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+import hdf.hdf5lib.structs.H5L_info_t;
+
+// Callback interface for H5Lvisit/H5Lvisit_by_name (link iteration)
+public interface H5L_iterate_cb extends Callbacks {
+ int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5L_iterate_t.java b/java/src/hdf/hdf5lib/callbacks/H5L_iterate_t.java
new file mode 100644
index 0000000..b7c25f4
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5L_iterate_t.java
@@ -0,0 +1,22 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+public interface H5L_iterate_t {
+/** Any class implementing this interface must define a single public data
+ * member to hold user data, for example: public ArrayList iterdata = new ArrayList();
+ */
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5O_iterate_cb.java b/java/src/hdf/hdf5lib/callbacks/H5O_iterate_cb.java
new file mode 100644
index 0000000..630205f
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5O_iterate_cb.java
@@ -0,0 +1,23 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+import hdf.hdf5lib.structs.H5O_info_t;
+
+// Callback interface for H5Ovisit/H5Ovisit_by_name (object iteration)
+public interface H5O_iterate_cb extends Callbacks {
+ int callback(long group, String name, H5O_info_t info, H5O_iterate_t op_data);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5O_iterate_t.java b/java/src/hdf/hdf5lib/callbacks/H5O_iterate_t.java
new file mode 100644
index 0000000..da64f19
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5O_iterate_t.java
@@ -0,0 +1,22 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+public interface H5O_iterate_t {
+/** Any class implementing this interface must define a single public data
+ * member to hold user data, for example: public ArrayList iterdata = new ArrayList();
+ */
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_cb.java
new file mode 100644
index 0000000..dcd4ed2
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_cb.java
@@ -0,0 +1,21 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+// Callback interface for H5Pcreate_class (property list class close)
+public interface H5P_cls_close_func_cb extends Callbacks {
+ int callback(long prop_id, H5P_cls_close_func_t close_data);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_t.java b/java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_t.java
new file mode 100644
index 0000000..222bd26
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_t.java
@@ -0,0 +1,22 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+public interface H5P_cls_close_func_t {
+/** Any class implementing this interface must define a single public data
+ * member to hold user data, for example: public ArrayList iterdata = new ArrayList();
+ */
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_cb.java
new file mode 100644
index 0000000..0dc8a94
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_cb.java
@@ -0,0 +1,21 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+// Callback interface for H5Pcreate_class (property list class copy)
+public interface H5P_cls_copy_func_cb extends Callbacks {
+ int callback(long new_prop_id, long old_prop_id, H5P_cls_copy_func_t copy_data);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_t.java b/java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_t.java
new file mode 100644
index 0000000..eed29bb
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_t.java
@@ -0,0 +1,22 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+public interface H5P_cls_copy_func_t {
+/** Any class implementing this interface must define a single public data
+ * member to hold user data, for example: public ArrayList iterdata = new ArrayList();
+ */
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_cls_create_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_cls_create_func_cb.java
new file mode 100644
index 0000000..777e302
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_cls_create_func_cb.java
@@ -0,0 +1,21 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+// Callback interface for H5Pcreate_class (property list class create)
+public interface H5P_cls_create_func_cb extends Callbacks {
+ int callback(long prop_id, H5P_cls_create_func_t create_data);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_cls_create_func_t.java b/java/src/hdf/hdf5lib/callbacks/H5P_cls_create_func_t.java
new file mode 100644
index 0000000..78973f0
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_cls_create_func_t.java
@@ -0,0 +1,22 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+public interface H5P_cls_create_func_t {
+/** Any class implementing this interface must define a single public data
+ * member to hold user data, for example: public ArrayList iterdata = new ArrayList();
+ */
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_iterate_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_iterate_cb.java
new file mode 100644
index 0000000..0d98325
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_iterate_cb.java
@@ -0,0 +1,21 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+// Callback interface for H5Piterate (property iteration)
+public interface H5P_iterate_cb extends Callbacks {
+ int callback(long plist, String name, H5P_iterate_t op_data);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_iterate_t.java b/java/src/hdf/hdf5lib/callbacks/H5P_iterate_t.java
new file mode 100644
index 0000000..dbbf80d
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_iterate_t.java
@@ -0,0 +1,22 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+public interface H5P_iterate_t {
+/** Any class implementing this interface must define a single public data
+ * member to hold user data, for example: public ArrayList iterdata = new ArrayList();
+ */
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_prp_close_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_prp_close_func_cb.java
new file mode 100644
index 0000000..3ea44ac
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_prp_close_func_cb.java
@@ -0,0 +1,21 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+// Callback interface for H5Pregister2 (property close)
+public interface H5P_prp_close_func_cb extends Callbacks {
+ int callback(String name, long size, byte[] value);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_prp_compare_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_prp_compare_func_cb.java
new file mode 100644
index 0000000..0daaca5
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_prp_compare_func_cb.java
@@ -0,0 +1,21 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+// Callback interface for H5Pregister2 (property compare)
+public interface H5P_prp_compare_func_cb extends Callbacks {
+ int callback(byte[] value1, byte[] value2, long size);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_prp_copy_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_prp_copy_func_cb.java
new file mode 100644
index 0000000..4e560e2
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_prp_copy_func_cb.java
@@ -0,0 +1,21 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+// Callback interface for H5Pregister2 (property copy)
+public interface H5P_prp_copy_func_cb extends Callbacks {
+ int callback(String name, long size, byte[] value);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_prp_create_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_prp_create_func_cb.java
new file mode 100644
index 0000000..dc4fee1
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_prp_create_func_cb.java
@@ -0,0 +1,21 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+// Callback interface for H5Pregister2 (property create)
+public interface H5P_prp_create_func_cb extends Callbacks {
+ int callback(String name, long size, byte[] value);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_prp_delete_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_prp_delete_func_cb.java
new file mode 100644
index 0000000..9aa27ab
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_prp_delete_func_cb.java
@@ -0,0 +1,21 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+// Callback interface for H5Pregister2 (property delete)
+public interface H5P_prp_delete_func_cb extends Callbacks {
+ int callback(long prop_id, String name, long size, byte[] value);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_prp_get_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_prp_get_func_cb.java
new file mode 100644
index 0000000..bfc7b37
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_prp_get_func_cb.java
@@ -0,0 +1,21 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+// Callback interface for H5Pregister2 (property get)
+public interface H5P_prp_get_func_cb extends Callbacks {
+ int callback(long prop_id, String name, long size, byte[] value);
+}
diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_prp_set_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_prp_set_func_cb.java
new file mode 100644
index 0000000..c1e9b00
--- /dev/null
+++ b/java/src/hdf/hdf5lib/callbacks/H5P_prp_set_func_cb.java
@@ -0,0 +1,21 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.callbacks;
+
+// Callback interface for H5Pregister2 (property set)
+public interface H5P_prp_set_func_cb extends Callbacks {
+ int callback(long prop_id, String name, long size, byte[] value);
+}
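The H5P_prp_* hooks above all share this shape; as a minimal, hypothetical sketch (how such a hook gets registered through H5Pregister2 lies outside this hunk and is only assumed), a 'set' callback could simply log each property update:

    import hdf.hdf5lib.callbacks.H5P_prp_set_func_cb;

    // Hypothetical hook: report every change made to the registered property.
    class LoggingPropSetCallback implements H5P_prp_set_func_cb {
        public int callback(long prop_id, String name, long size, byte[] value) {
            System.out.println("property '" + name + "' set on list " + prop_id
                    + " (" + size + " bytes)");
            return 0; // 0 reports success back to the library
        }
    }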
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5AtomException.java b/java/src/hdf/hdf5lib/exceptions/HDF5AtomException.java
new file mode 100644
index 0000000..3309223
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5AtomException.java
@@ -0,0 +1,44 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_ATOM</b>
+ */
+
+public class HDF5AtomException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5AtomException</code> with no specified detail
+ * message.
+ */
+ public HDF5AtomException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5AtomException</code> with the specified detail
+ * message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5AtomException(String s) {
+ super(s);
+ }
+
+}
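Because each of these subclasses maps one HDF5 major error code onto a Java type, callers can catch narrowly first and fall back to the common base class; a small hedged sketch (the failing call is a stand-in, not a real library invocation):

    import hdf.hdf5lib.exceptions.HDF5AtomException;
    import hdf.hdf5lib.exceptions.HDF5LibraryException;

    public class ErrorHandlingSketch {
        static void openSomething() throws HDF5LibraryException {
            // Stand-in for a wrapped HDF5 call failing with an H5E_ATOM error.
            throw new HDF5AtomException("not a valid identifier");
        }

        public static void main(String[] args) {
            try {
                openSomething();
            }
            catch (HDF5AtomException e) {
                System.err.println("identifier problem: " + e.getMessage()); // H5E_ATOM only
            }
            catch (HDF5LibraryException e) {
                System.err.println("other HDF5 error: " + e.getMessage()); // any other major code
            }
        }
    }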
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5AttributeException.java b/java/src/hdf/hdf5lib/exceptions/HDF5AttributeException.java
new file mode 100644
index 0000000..8c1a7b8
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5AttributeException.java
@@ -0,0 +1,42 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_ATTR</b>
+ */
+public class HDF5AttributeException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5AttributeException</code> with no specified
+ * detail message.
+ */
+ public HDF5AttributeException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5AttributeException</code> with the specified
+ * detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5AttributeException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5BtreeException.java b/java/src/hdf/hdf5lib/exceptions/HDF5BtreeException.java
new file mode 100644
index 0000000..dc82fd9
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5BtreeException.java
@@ -0,0 +1,42 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_BTREE</b>
+ */
+public class HDF5BtreeException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5BtreeException</code> with no specified detail
+ * message.
+ */
+ public HDF5BtreeException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5BtreeException</code> with the specified detail
+ * message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5BtreeException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5DataFiltersException.java b/java/src/hdf/hdf5lib/exceptions/HDF5DataFiltersException.java
new file mode 100644
index 0000000..17fd9db
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5DataFiltersException.java
@@ -0,0 +1,42 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_PLINE</b>
+ */
+public class HDF5DataFiltersException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5DataFiltersException</code> with no specified
+ * detail message.
+ */
+ public HDF5DataFiltersException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5DataFiltersException</code> with the specified
+ * detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5DataFiltersException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5DataStorageException.java b/java/src/hdf/hdf5lib/exceptions/HDF5DataStorageException.java
new file mode 100644
index 0000000..f22c9ec
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5DataStorageException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_STORAGE</b>
+ */
+
+public class HDF5DataStorageException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5DataStorageException</code> with no specified
+ * detail message.
+ */
+ public HDF5DataStorageException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5DataStorageException</code> with the specified
+ * detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5DataStorageException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5DatasetInterfaceException.java b/java/src/hdf/hdf5lib/exceptions/HDF5DatasetInterfaceException.java
new file mode 100644
index 0000000..e39709d
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5DatasetInterfaceException.java
@@ -0,0 +1,42 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_DATASET</b>
+ */
+public class HDF5DatasetInterfaceException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5DatasetInterfaceException</code> with no
+ * specified detail message.
+ */
+ public HDF5DatasetInterfaceException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5DatasetInterfaceException</code> with the
+ * specified detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5DatasetInterfaceException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5DataspaceInterfaceException.java b/java/src/hdf/hdf5lib/exceptions/HDF5DataspaceInterfaceException.java
new file mode 100644
index 0000000..531202c
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5DataspaceInterfaceException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_DATASPACE</b>
+ */
+
+public class HDF5DataspaceInterfaceException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5DataspaceInterfaceException</code> with no
+ * specified detail message.
+ */
+ public HDF5DataspaceInterfaceException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5DataspaceInterfaceException</code> with the
+ * specified detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5DataspaceInterfaceException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5DatatypeInterfaceException.java b/java/src/hdf/hdf5lib/exceptions/HDF5DatatypeInterfaceException.java
new file mode 100644
index 0000000..3c0c144
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5DatatypeInterfaceException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_DATATYPE</b>
+ */
+
+public class HDF5DatatypeInterfaceException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5DatatypeInterfaceException</code> with no
+ * specified detail message.
+ */
+ public HDF5DatatypeInterfaceException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5DatatypeInterfaceException</code> with the
+ * specified detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5DatatypeInterfaceException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5Exception.java b/java/src/hdf/hdf5lib/exceptions/HDF5Exception.java
new file mode 100644
index 0000000..8a83ca1
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5Exception.java
@@ -0,0 +1,68 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * <p>
+ * The class HDF5Exception returns errors from the Java HDF5 Interface.
+ * <p>
+ * Two sub-classes of HDF5Exception are defined:
+ * <ol>
+ * <li>
+ * HDF5LibraryException -- errors raised by the HDF5 library code
+ * <li>
+ * HDF5JavaException -- errors raised by the HDF5 Java wrapper code
+ * </ol>
+ * <p>
+ * These exceptions are sub-classed to represent specific error conditions, as
+ * needed. In particular, HDF5LibraryException has a sub-class for each major
+ * error code returned by the HDF5 library.
+ *
+ */
+public class HDF5Exception extends Exception {
+ protected String detailMessage;
+
+ /**
+ * Constructs an <code>HDF5Exception</code> with no specified detail
+ * message.
+ */
+ public HDF5Exception() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5Exception</code> with the specified detail
+ * message.
+ *
+ * @param message
+ * the detail message.
+ */
+ public HDF5Exception(String message) {
+ super();
+ detailMessage = message;
+ }
+
+ /**
+ * Returns the detail message of this exception
+ *
+ * @return the detail message or <code>null</code> if this object does not
+ * have a detail message.
+ */
+ @Override
+ public String getMessage() {
+ return detailMessage;
+ }
+}
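
A short sketch (not part of the changeset above) of the behaviour described in the Javadoc: the detail message passed to the constructor is stored in detailMessage and returned by getMessage().

    import hdf.hdf5lib.exceptions.HDF5Exception;

    public class ExceptionMessageDemo {
        public static void main(String[] args) {
            try {
                throw new HDF5Exception("example detail message");
            }
            catch (HDF5Exception e) {
                // prints "example detail message"; it would print null if the
                // no-argument constructor had been used
                System.out.println(e.getMessage());
            }
        }
    }
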
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5ExternalFileListException.java b/java/src/hdf/hdf5lib/exceptions/HDF5ExternalFileListException.java
new file mode 100644
index 0000000..62c5dd8
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5ExternalFileListException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_EFL</b>
+ */
+
+public class HDF5ExternalFileListException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5ExternalFileListException</code> with no
+ * specified detail message.
+ */
+ public HDF5ExternalFileListException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5ExternalFileListException</code> with the
+ * specified detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5ExternalFileListException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5FileInterfaceException.java b/java/src/hdf/hdf5lib/exceptions/HDF5FileInterfaceException.java
new file mode 100644
index 0000000..0b63b41
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5FileInterfaceException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_FILE</b>
+ */
+
+public class HDF5FileInterfaceException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5FileInterfaceException</code> with no specified
+ * detail message.
+ */
+ public HDF5FileInterfaceException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5FileInterfaceException</code> with the specified
+ * detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5FileInterfaceException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5FunctionArgumentException.java b/java/src/hdf/hdf5lib/exceptions/HDF5FunctionArgumentException.java
new file mode 100644
index 0000000..c984aae
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5FunctionArgumentException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_ARGS</b>
+ */
+
+public class HDF5FunctionArgumentException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5FunctionArgumentException</code> with no
+ * specified detail message.
+ */
+ public HDF5FunctionArgumentException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5FunctionArgumentException</code> with the
+ * specified detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5FunctionArgumentException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5FunctionEntryExitException.java b/java/src/hdf/hdf5lib/exceptions/HDF5FunctionEntryExitException.java
new file mode 100644
index 0000000..5dad930
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5FunctionEntryExitException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_FUNC</b>
+ */
+
+public class HDF5FunctionEntryExitException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5FunctionEntryExitException</code> with no
+ * specified detail message.
+ */
+ public HDF5FunctionEntryExitException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5FunctionEntryExitException</code> with the
+ * specified detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5FunctionEntryExitException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5HeapException.java b/java/src/hdf/hdf5lib/exceptions/HDF5HeapException.java
new file mode 100644
index 0000000..dfe4ced
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5HeapException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_HEAP</b>
+ */
+
+public class HDF5HeapException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5HeapException</code> with no specified detail
+ * message.
+ */
+ public HDF5HeapException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5HeapException</code> with the specified detail
+ * message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5HeapException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5InternalErrorException.java b/java/src/hdf/hdf5lib/exceptions/HDF5InternalErrorException.java
new file mode 100644
index 0000000..45a836c
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5InternalErrorException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_INTERNAL</b>
+ */
+
+public class HDF5InternalErrorException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5InternalErrorException</code> with no specified
+ * detail message.
+ */
+ public HDF5InternalErrorException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5InternalErrorException</code> with the specified
+ * detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5InternalErrorException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5JavaException.java b/java/src/hdf/hdf5lib/exceptions/HDF5JavaException.java
new file mode 100644
index 0000000..2db79ef
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5JavaException.java
@@ -0,0 +1,45 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * <p>
+ * The class HDF5JavaException returns errors from the Java wrapper of the HDF5
+ * library.
+ * <p>
+ * These errors include Java configuration errors, security violations, and
+ * resource exhaustion.
+ */
+public class HDF5JavaException extends HDF5Exception {
+ /**
+ * Constructs an <code>HDF5JavaException</code> with no specified detail
+ * message.
+ */
+ public HDF5JavaException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5JavaException</code> with the specified detail
+ * message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5JavaException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java b/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java
new file mode 100644
index 0000000..8b21ff8
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java
@@ -0,0 +1,383 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+/**
+ * <p>
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * Each major error code from the HDF-5 Library is represented by a sub-class of
+ * this class, and by default the 'detailMessage' is set according to the
+ * minor error code from the HDF-5 Library.
+ * <p>
+ * For major and minor error codes, see <b>H5Epublic.h</b> in the HDF-5 library.
+ *
+ */
+
+@SuppressWarnings("serial")
+public class HDF5LibraryException extends HDF5Exception {
+ /**
+ * Constructs an <code>HDF5LibraryException</code> with no specified detail
+ * message.
+ */
+ public HDF5LibraryException() {
+ super();
+
+ // this code forces the loading of the HDF-5 library
+ // to ensure that the native methods are available
+ try {
+ H5.H5open();
+ }
+ catch (Exception e) {
+ // ignore failures; this call only forces the native library to load
+ }
+
+ detailMessage = getMinorError(getMinorErrorNumber());
+ }
+
+ /**
+ * Constructs an <code>HDF5LibraryException</code> with the specified detail
+ * message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5LibraryException(String s) {
+ super(s);
+ // this code forces the loading of the HDF-5 library
+ // to ensure that the native methods are available
+ try {
+ H5.H5open();
+ }
+ catch (Exception e) {
+ // ignore failures; this call only forces the native library to load
+ }
+ }
+
+ /**
+ * Get the major error number of the first error on the HDF5 library error
+ * stack.
+ *
+ * @return the major error number
+ */
+ public native long getMajorErrorNumber();
+
+ /**
+ * Get the minor error number of the first error on the HDF5 library error
+ * stack.
+ *
+ * @return the minor error number
+ */
+ public native long getMinorErrorNumber();
+
+ /**
+ * Returns an error message for the minor error number.
+ * <p>
+ * These messages come from <b>H5Epublic.h</b>.
+ *
+ * @param err_code
+ * the error code
+ *
+ * @return the string of the minor error
+ */
+ public String getMinorError(long err_code) {
+ if (err_code == 0) {
+ return "special zero no error";
+ }
+ else if (err_code == HDF5Constants.H5E_UNINITIALIZED) {
+ return "information is unitialized";
+ }
+ else if (err_code == HDF5Constants.H5E_UNSUPPORTED) {
+ return "feature is unsupported";
+ }
+ else if (err_code == HDF5Constants.H5E_BADTYPE) {
+ return "incorrect type found";
+ }
+ else if (err_code == HDF5Constants.H5E_BADRANGE) {
+ return "argument out of range";
+ }
+ else if (err_code == HDF5Constants.H5E_BADVALUE) {
+ return "bad value for argument";
+ }
+ else if (err_code == HDF5Constants.H5E_NOSPACE) {
+ return "no space available for allocation";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTCOPY) {
+ return "unable to copy object";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTFREE) {
+ return "unable to free object";
+ }
+ else if (err_code == HDF5Constants.H5E_ALREADYEXISTS) {
+ return "Object already exists";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTLOCK) {
+ return "Unable to lock object";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTUNLOCK) {
+ return "Unable to unlock object";
+ }
+ else if (err_code == HDF5Constants.H5E_FILEEXISTS) {
+ return "file already exists";
+ }
+ else if (err_code == HDF5Constants.H5E_FILEOPEN) {
+ return "file already open";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTCREATE) {
+ return "Can't create file";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTOPENFILE) {
+ return "Can't open file";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTCLOSEFILE) {
+ return "Can't close file";
+ }
+ else if (err_code == HDF5Constants.H5E_NOTHDF5) {
+ return "not an HDF5 format file";
+ }
+ else if (err_code == HDF5Constants.H5E_BADFILE) {
+ return "bad file ID accessed";
+ }
+ else if (err_code == HDF5Constants.H5E_TRUNCATED) {
+ return "file has been truncated";
+ }
+ else if (err_code == HDF5Constants.H5E_MOUNT) {
+ return "file mount error";
+ }
+ else if (err_code == HDF5Constants.H5E_SEEKERROR) {
+ return "seek failed";
+ }
+ else if (err_code == HDF5Constants.H5E_READERROR) {
+ return "read failed";
+ }
+ else if (err_code == HDF5Constants.H5E_WRITEERROR) {
+ return "write failed";
+ }
+ else if (err_code == HDF5Constants.H5E_CLOSEERROR) {
+ return "close failed";
+ }
+ else if (err_code == HDF5Constants.H5E_OVERFLOW) {
+ return "address overflowed";
+ }
+ else if (err_code == HDF5Constants.H5E_FCNTL) {
+ return "file fcntl failed";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTINIT) {
+ return "Can't initialize object";
+ }
+ else if (err_code == HDF5Constants.H5E_ALREADYINIT) {
+ return "object already initialized";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTRELEASE) {
+ return "Can't release object";
+ }
+ else if (err_code == HDF5Constants.H5E_BADATOM) {
+ return "Can't find atom information";
+ }
+ else if (err_code == HDF5Constants.H5E_BADGROUP) {
+ return "Can't find group information";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTREGISTER) {
+ return "Can't register new atom";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTINC) {
+ return "Can't increment reference count";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTDEC) {
+ return "Can't decrement reference count";
+ }
+ else if (err_code == HDF5Constants.H5E_NOIDS) {
+ return "Out of IDs for group";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTFLUSH) {
+ return "Can't flush object from cache";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTLOAD) {
+ return "Can't load object into cache";
+ }
+ else if (err_code == HDF5Constants.H5E_PROTECT) {
+ return "protected object error";
+ }
+ else if (err_code == HDF5Constants.H5E_NOTCACHED) {
+ return "object not currently cached";
+ }
+ else if (err_code == HDF5Constants.H5E_NOTFOUND) {
+ return "object not found";
+ }
+ else if (err_code == HDF5Constants.H5E_EXISTS) {
+ return "object already exists";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTENCODE) {
+ return "Can't encode value";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTDECODE) {
+ return "Can't decode value";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTSPLIT) {
+ return "Can't split node";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTINSERT) {
+ return "Can't insert object";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTLIST) {
+ return "Can't list node";
+ }
+ else if (err_code == HDF5Constants.H5E_LINKCOUNT) {
+ return "bad object header link count";
+ }
+ else if (err_code == HDF5Constants.H5E_VERSION) {
+ return "wrong version number";
+ }
+ else if (err_code == HDF5Constants.H5E_ALIGNMENT) {
+ return "alignment error";
+ }
+ else if (err_code == HDF5Constants.H5E_BADMESG) {
+ return "unrecognized message";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTDELETE) {
+ return " Can't delete message";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTOPENOBJ) {
+ return "Can't open object";
+ }
+ else if (err_code == HDF5Constants.H5E_COMPLEN) {
+ return "name component is too long";
+ }
+ else if (err_code == HDF5Constants.H5E_LINK) {
+ return "link count failure";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTCONVERT) {
+ return "Can't convert datatypes";
+ }
+ else if (err_code == HDF5Constants.H5E_BADSIZE) {
+ return "Bad size for object";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTCLIP) {
+ return "Can't clip hyperslab region";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTCOUNT) {
+ return "Can't count elements";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTSELECT) {
+ return "Can't select hyperslab";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTNEXT) {
+ return "Can't move to next iterator location";
+ }
+ else if (err_code == HDF5Constants.H5E_BADSELECT) {
+ return "Invalid selection";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTGET) {
+ return "Can't get value";
+ }
+ else if (err_code == HDF5Constants.H5E_CANTSET) {
+ return "Can't set value";
+ }
+ else if (err_code == HDF5Constants.H5E_DUPCLASS) {
+ return "Duplicate class name in parent class";
+ }
+ else if (err_code == HDF5Constants.H5E_MPI) {
+ return "some MPI function failed";
+ }
+ else if (err_code == HDF5Constants.H5E_MPIERRSTR) {
+ return "MPI Error String";
+ // }
+ // else if (err_code == HDF5Constants.H5E_CANTRECV) {
+ //     return "can't receive messages from processes";
+ // }
+ // else if (err_code == HDF5Constants.H5E_CANTALLOC) {
+ //     return "can't allocate from file";
+ }
+ else if (err_code == HDF5Constants.H5E_NOFILTER) {
+ return "requested filter is not available";
+ }
+ else if (err_code == HDF5Constants.H5E_CALLBACK) {
+ return "callback failed";
+ }
+ else if (err_code == HDF5Constants.H5E_CANAPPLY) {
+ return "error from filter \"can apply\" callback";
+ }
+ else if (err_code == HDF5Constants.H5E_SETLOCAL) {
+ return "error from filter \"set local\" callback";
+ }
+ else {
+ return "undefined error(" + err_code + ")";
+ }
+ }
+
+ /**
+ * Prints this <code>HDF5LibraryException</code>, the HDF-5 Library error
+ * stack, and the Java stack trace to the standard error stream.
+ */
+ @Override
+ public void printStackTrace() {
+ System.err.println(this);
+ printStackTrace0(null); // the HDF-5 Library error stack
+ super.printStackTrace(); // the Java stack trace
+ }
+
+ /**
+ * Prints this <code>HDF5LibraryException</code>, the HDF-5 Library error
+ * stack, and the Java stack trace to the specified file.
+ *
+ * @param f
+ * the file to which the output is written.
+ */
+ public void printStackTrace(java.io.File f) {
+ if ((f == null) || !f.exists() || f.isDirectory() || !f.canWrite()) {
+ printStackTrace();
+ }
+ else {
+ try {
+ java.io.FileOutputStream o = new java.io.FileOutputStream(f);
+ java.io.PrintWriter p = new java.io.PrintWriter(o);
+ p.println(this);
+ p.close();
+ }
+ catch (Exception ex) {
+ // fall back to standard error if the file cannot be written
+ System.err.println(this);
+ }
+ // the HDF-5 Library error stack
+ printStackTrace0(f.getPath());
+ super.printStackTrace(); // the Java stack trace
+ }
+ }
+
+ /*
+ * This private method calls the HDF-5 library to extract the error codes
+ * and error stack.
+ */
+ private native void printStackTrace0(String s);
+
+}
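
A minimal sketch of how the error-stack helpers above might be used. It assumes the wrapper's usual H5.H5Fopen(String, int, long) signature and requires the native HDF5 library to be loadable; the file names are placeholders.

    import java.io.File;
    import java.io.IOException;

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.hdf5lib.exceptions.HDF5LibraryException;

    public class LibraryErrorDemo {
        public static void main(String[] args) throws IOException {
            try {
                // opening a file that does not exist raises a library error
                H5.H5Fopen("does_not_exist.h5", HDF5Constants.H5F_ACC_RDONLY,
                        HDF5Constants.H5P_DEFAULT);
            }
            catch (HDF5LibraryException e) {
                long minor = e.getMinorErrorNumber();
                System.err.println("minor error " + minor + ": " + e.getMinorError(minor));
                // printStackTrace(File) falls back to System.err unless the file already exists
                File log = new File("hdf5_error.log");
                log.createNewFile();
                e.printStackTrace(log);
            }
        }
    }
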
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5LowLevelIOException.java b/java/src/hdf/hdf5lib/exceptions/HDF5LowLevelIOException.java
new file mode 100644
index 0000000..e0a95d0
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5LowLevelIOException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_IO</b>
+ */
+
+public class HDF5LowLevelIOException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5LowLevelIOException</code> with no specified
+ * detail message.
+ */
+ public HDF5LowLevelIOException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5LowLevelIOException</code> with the specified
+ * detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5LowLevelIOException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5MetaDataCacheException.java b/java/src/hdf/hdf5lib/exceptions/HDF5MetaDataCacheException.java
new file mode 100644
index 0000000..cbc3b75
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5MetaDataCacheException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_CACHE</b>
+ */
+
+public class HDF5MetaDataCacheException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5MetaDataCacheException</code> with no specified
+ * detail message.
+ */
+ public HDF5MetaDataCacheException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5MetaDataCacheException</code> with the specified
+ * detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5MetaDataCacheException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5ObjectHeaderException.java b/java/src/hdf/hdf5lib/exceptions/HDF5ObjectHeaderException.java
new file mode 100644
index 0000000..0cdd0d1
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5ObjectHeaderException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_OHDR</b>
+ */
+
+public class HDF5ObjectHeaderException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5ObjectHeaderException</code> with no specified
+ * detail message.
+ */
+ public HDF5ObjectHeaderException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5ObjectHeaderException</code> with the specified
+ * detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5ObjectHeaderException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5PropertyListInterfaceException.java b/java/src/hdf/hdf5lib/exceptions/HDF5PropertyListInterfaceException.java
new file mode 100644
index 0000000..aa9f668
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5PropertyListInterfaceException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_PLIST</b>
+ */
+
+public class HDF5PropertyListInterfaceException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5PropertyListInterfaceException</code> with no
+ * specified detail message.
+ */
+ public HDF5PropertyListInterfaceException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5PropertyListInterfaceException</code> with the
+ * specified detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5PropertyListInterfaceException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5ReferenceException.java b/java/src/hdf/hdf5lib/exceptions/HDF5ReferenceException.java
new file mode 100644
index 0000000..2c8c93b
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5ReferenceException.java
@@ -0,0 +1,37 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
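+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents errors raised when working with object references.
+ */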
+public class HDF5ReferenceException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5ReferenceException</code> with no specified
+ * detail message.
+ */
+ public HDF5ReferenceException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5ReferenceException</code> with the specified
+ * detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5ReferenceException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5ResourceUnavailableException.java b/java/src/hdf/hdf5lib/exceptions/HDF5ResourceUnavailableException.java
new file mode 100644
index 0000000..f1c0c60
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5ResourceUnavailableException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_RESOURCE</b>
+ */
+
+public class HDF5ResourceUnavailableException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5ResourceUnavailableException</code> with no
+ * specified detail message.
+ */
+ public HDF5ResourceUnavailableException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5ResourceUnavailableException</code> with the
+ * specified detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5ResourceUnavailableException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5SymbolTableException.java b/java/src/hdf/hdf5lib/exceptions/HDF5SymbolTableException.java
new file mode 100644
index 0000000..87384d1
--- /dev/null
+++ b/java/src/hdf/hdf5lib/exceptions/HDF5SymbolTableException.java
@@ -0,0 +1,43 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.exceptions;
+
+/**
+ * The class HDF5LibraryException returns errors raised by the HDF5 library.
+ * <p>
+ * This sub-class represents HDF-5 major error code <b>H5E_SYM</b>
+ */
+
+public class HDF5SymbolTableException extends HDF5LibraryException {
+ /**
+ * Constructs an <code>HDF5SymbolTableException</code> with no specified
+ * detail message.
+ */
+ public HDF5SymbolTableException() {
+ super();
+ }
+
+ /**
+ * Constructs an <code>HDF5SymbolTableException</code> with the specified
+ * detail message.
+ *
+ * @param s
+ * the detail message.
+ */
+ public HDF5SymbolTableException(String s) {
+ super(s);
+ }
+}
diff --git a/java/src/hdf/hdf5lib/structs/H5AC_cache_config_t.java b/java/src/hdf/hdf5lib/structs/H5AC_cache_config_t.java
new file mode 100644
index 0000000..2128d8a
--- /dev/null
+++ b/java/src/hdf/hdf5lib/structs/H5AC_cache_config_t.java
@@ -0,0 +1,99 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+//Information struct for H5Pget_mdc_config/H5Pset_mdc_config
+public class H5AC_cache_config_t implements Serializable{
+ private static final long serialVersionUID = -6748085696476149972L;
+ // general configuration fields:
+ public int version;
+ public boolean rpt_fcn_enabled;
+ public boolean open_trace_file;
+ public boolean close_trace_file;
+ public String trace_file_name;
+ public boolean evictions_enabled;
+ public boolean set_initial_size;
+ public long initial_size;
+ public double min_clean_fraction;
+ public long max_size;
+ public long min_size;
+ public long epoch_length;
+ // size increase control fields:
+ public int incr_mode; // H5C_cache_incr_mode
+ public double lower_hr_threshold;
+ public double increment;
+ public boolean apply_max_increment;
+ public long max_increment;
+ public int flash_incr_mode; // H5C_cache_flash_incr_mode
+ public double flash_multiple;
+ public double flash_threshold;
+ // size decrease control fields:
+ public int decr_mode; // H5C_cache_decr_mode
+ public double upper_hr_threshold;
+ public double decrement;
+ public boolean apply_max_decrement;
+ public long max_decrement;
+ public int epochs_before_eviction;
+ public boolean apply_empty_reserve;
+ public double empty_reserve;
+ // parallel configuration fields:
+ public long dirty_bytes_threshold;
+ public int metadata_write_strategy;
+
+ public H5AC_cache_config_t (int version, boolean rpt_fcn_enabled, boolean open_trace_file,
+ boolean close_trace_file, String trace_file_name, boolean evictions_enabled,
+ boolean set_initial_size, long initial_size, double min_clean_fraction, long max_size,
+ long min_size, long epoch_length, int incr_mode, double lower_hr_threshold,
+ double increment, boolean apply_max_increment, long max_increment, int flash_incr_mode,
+ double flash_multiple, double flash_threshold, int decr_mode, double upper_hr_threshold,
+ double decrement, boolean apply_max_decrement, long max_decrement,
+ int epochs_before_eviction, boolean apply_empty_reserve, double empty_reserve,
+ long dirty_bytes_threshold, int metadata_write_strategy)
+ {
+ this.version = version;
+ this.rpt_fcn_enabled = rpt_fcn_enabled;
+ this.open_trace_file = open_trace_file;
+ this.close_trace_file = close_trace_file;
+ this.trace_file_name = trace_file_name;
+ this.evictions_enabled = evictions_enabled;
+ this.set_initial_size = set_initial_size;
+ this.initial_size = initial_size;
+ this.min_clean_fraction = min_clean_fraction;
+ this.max_size = max_size;
+ this.min_size = min_size;
+ this.epoch_length = epoch_length;
+ this.incr_mode = incr_mode;
+ this.lower_hr_threshold = lower_hr_threshold;
+ this.increment = increment;
+ this.apply_max_increment = apply_max_increment;
+ this.max_increment = max_increment;
+ this.flash_incr_mode = flash_incr_mode;
+ this.flash_multiple = flash_multiple;
+ this.flash_threshold = flash_threshold;
+ this.decr_mode = decr_mode;
+ this.upper_hr_threshold = upper_hr_threshold;
+ this.decrement = decrement;
+ this.apply_max_decrement = apply_max_decrement;
+ this.max_decrement = max_decrement;
+ this.epochs_before_eviction = epochs_before_eviction;
+ this.apply_empty_reserve = apply_empty_reserve;
+ this.empty_reserve = empty_reserve;
+ this.dirty_bytes_threshold = dirty_bytes_threshold;
+ this.metadata_write_strategy = metadata_write_strategy;
+ }
+}
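
A construction sketch for the struct above (not part of the changeset). All numeric values are illustrative only; real code would normally start from the configuration returned by the metadata-cache property-list getter (H5Pget_mdc_config in the C API) and adjust individual fields before passing the struct back to the corresponding setter.

    import hdf.hdf5lib.structs.H5AC_cache_config_t;

    public class CacheConfigDemo {
        public static void main(String[] args) {
            // every value below is illustrative, not a recommended setting
            H5AC_cache_config_t config = new H5AC_cache_config_t(
                    1,                     // version
                    false, false, false,   // rpt_fcn_enabled, open_trace_file, close_trace_file
                    "",                    // trace_file_name
                    true,                  // evictions_enabled
                    true, 2 * 1024 * 1024, // set_initial_size, initial_size
                    0.5,                   // min_clean_fraction
                    32 * 1024 * 1024, 1024 * 1024, // max_size, min_size
                    50000,                 // epoch_length
                    1, 0.9, 2.0,           // incr_mode, lower_hr_threshold, increment
                    true, 4 * 1024 * 1024, // apply_max_increment, max_increment
                    0, 1.0, 0.25,          // flash_incr_mode, flash_multiple, flash_threshold
                    1, 0.999, 0.9,         // decr_mode, upper_hr_threshold, decrement
                    true, 1024 * 1024,     // apply_max_decrement, max_decrement
                    3, true, 0.1,          // epochs_before_eviction, apply_empty_reserve, empty_reserve
                    256 * 1024, 0);        // dirty_bytes_threshold, metadata_write_strategy
            System.out.println("initial cache size: " + config.initial_size + " bytes");
        }
    }
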
diff --git a/java/src/hdf/hdf5lib/structs/H5A_info_t.java b/java/src/hdf/hdf5lib/structs/H5A_info_t.java
new file mode 100644
index 0000000..b04a757
--- /dev/null
+++ b/java/src/hdf/hdf5lib/structs/H5A_info_t.java
@@ -0,0 +1,34 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+//Information struct for Attribute (For H5Aget_info/H5Aget_info_by_idx/H5Aget_info_by_name)
+public class H5A_info_t implements Serializable{
+ private static final long serialVersionUID = 2791443594041667613L;
+ public boolean corder_valid; // Indicate if creation order is valid
+ public long corder; // Creation order of attribute
+ public int cset; // Character set of attribute name
+ public long data_size; // Size of raw data
+
+ H5A_info_t(boolean corder_valid, long corder, int cset, long data_size) {
+ this.corder_valid = corder_valid;
+ this.corder = corder;
+ this.cset = cset;
+ this.data_size = data_size;
+ }
+}
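
A reading sketch for the struct above, assuming the wrapper exposes an H5.H5Aget_info call that returns it, as the comment in the file suggests; attr_id stands for an attribute identifier opened elsewhere.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.structs.H5A_info_t;

    public class AttributeInfoDemo {
        // attr_id is an already-open attribute identifier obtained elsewhere
        static void printAttributeInfo(long attr_id) throws Exception {
            H5A_info_t info = H5.H5Aget_info(attr_id); // assumed wrapper call, see struct comment
            System.out.println("raw data size: " + info.data_size + " bytes"
                    + ", creation order "
                    + (info.corder_valid ? Long.toString(info.corder) : "not tracked"));
        }
    }
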
diff --git a/java/src/hdf/hdf5lib/structs/H5E_error2_t.java b/java/src/hdf/hdf5lib/structs/H5E_error2_t.java
new file mode 100644
index 0000000..b7cd60a
--- /dev/null
+++ b/java/src/hdf/hdf5lib/structs/H5E_error2_t.java
@@ -0,0 +1,41 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+//Information struct for an error stack entry (for H5Ewalk)
+public class H5E_error2_t implements Serializable{
+ private static final long serialVersionUID = 279144359041667613L;
+
+ public long cls_id; //class ID
+ public long maj_num; //major error ID
+ public long min_num; //minor error number
+ public int line; //line in file where error occurs
+ public String func_name; //function in which error occurred
+ public String file_name; //file in which error occurred
+ public String desc; //optional supplied description
+
+ H5E_error2_t(long cls_id, long maj_num, long min_num, int line, String func_name, String file_name, String desc) {
+ this.cls_id = cls_id;
+ this.maj_num = maj_num;
+ this.min_num = min_num;
+ this.line = line;
+ this.func_name = func_name;
+ this.file_name = file_name;
+ this.desc = desc;
+ }
+}
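
The constructor above is package-private, so instances are produced by the library itself while walking the error stack (H5Ewalk); user code typically only reads the public fields, for example to format a log line as in this small sketch.

    import hdf.hdf5lib.structs.H5E_error2_t;

    public class ErrorRecordDemo {
        // Formats one error-stack entry as delivered to an H5Ewalk callback.
        static String format(H5E_error2_t err) {
            return err.file_name + ":" + err.line + " in " + err.func_name
                    + " (major " + err.maj_num + ", minor " + err.min_num + "): " + err.desc;
        }
    }
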
diff --git a/java/src/hdf/hdf5lib/structs/H5F_info2_t.java b/java/src/hdf/hdf5lib/structs/H5F_info2_t.java
new file mode 100644
index 0000000..8fbde68
--- /dev/null
+++ b/java/src/hdf/hdf5lib/structs/H5F_info2_t.java
@@ -0,0 +1,47 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+//Information struct for object (for H5Fget_info)
+public class H5F_info2_t implements Serializable{
+ private static final long serialVersionUID = 4691681162544054518L;
+ public int super_version; // Superblock version #
+ public long super_size; // Superblock size
+ public long super_ext_size; // Superblock extension size
+ public int free_version; // Version # of file free space management
+ public long free_meta_size; // Free space manager metadata size
+ public long free_tot_space; // Amount of free space in the file
+ public int sohm_version; // Version # of shared object header info
+ public long sohm_hdr_size; // Shared object header message header size
+ public H5_ih_info_t sohm_msgs_info; // Shared object header message index & heap size
+
+ public H5F_info2_t (int super_version, long super_size, long super_ext_size,
+ int free_version, long free_meta_size, long free_tot_space,
+ int sohm_version, long sohm_hdr_size, H5_ih_info_t sohm_msgs_info)
+ {
+ this.super_version = super_version;
+ this.super_size = super_size;
+ this.super_ext_size = super_ext_size;
+ this.free_version = free_version;
+ this.free_meta_size = free_meta_size;
+ this.free_tot_space = free_tot_space;
+ this.sohm_version = sohm_version;
+ this.sohm_hdr_size = sohm_hdr_size;
+ this.sohm_msgs_info = sohm_msgs_info;
+ }
+}
diff --git a/java/src/hdf/hdf5lib/structs/H5G_info_t.java b/java/src/hdf/hdf5lib/structs/H5G_info_t.java
new file mode 100644
index 0000000..daa6808
--- /dev/null
+++ b/java/src/hdf/hdf5lib/structs/H5G_info_t.java
@@ -0,0 +1,27 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+//Information struct for group (for H5Gget_info/H5Gget_info_by_name/H5Gget_info_by_idx)
+public class H5G_info_t implements Serializable{
+ private static final long serialVersionUID = -3746463015312132912L;
+ public int storage_type; // Type of storage for links in group
+ public long nlinks; // Number of links in group
+ public long max_corder; // Current max. creation order value for group
+ public boolean mounted; // Whether group has a file mounted on it
+}
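As with the file-info struct above, a minimal usage sketch follows; the file and group names are placeholders, and H5.H5Gget_info(long) is an assumed wrapper call to verify against the javadoc.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.hdf5lib.structs.H5G_info_t;

    public class GroupInfoSketch {
        public static void main(String[] args) throws Exception {
            long fid = H5.H5Fopen("example.h5", HDF5Constants.H5F_ACC_RDONLY,
                                  HDF5Constants.H5P_DEFAULT);
            long gid = H5.H5Gopen(fid, "/some_group", HDF5Constants.H5P_DEFAULT);
            try {
                H5G_info_t info = H5.H5Gget_info(gid); // assumed signature
                System.out.println("storage type: " + info.storage_type);
                System.out.println("link count:   " + info.nlinks);
            }
            finally {
                H5.H5Gclose(gid);
                H5.H5Fclose(fid);
            }
        }
    }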
diff --git a/java/src/hdf/hdf5lib/structs/H5L_info_t.java b/java/src/hdf/hdf5lib/structs/H5L_info_t.java
new file mode 100644
index 0000000..d43853f
--- /dev/null
+++ b/java/src/hdf/hdf5lib/structs/H5L_info_t.java
@@ -0,0 +1,38 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+//Information struct for link (for H5Lget_info/H5Lget_info_by_idx)
+public class H5L_info_t implements Serializable{
+ private static final long serialVersionUID = -4754320605310155033L;
+ public int type;
+ public boolean corder_valid;
+ public long corder;
+ public int cset;
+ public long address_val_size;
+
+ H5L_info_t (int type, boolean corder_valid, long corder,
+ int cset, long address_val_size)
+ {
+ this.type = type;
+ this.corder_valid = corder_valid;
+ this.corder = corder;
+ this.cset = cset;
+ this.address_val_size = address_val_size;
+ }
+}
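A corresponding sketch for link information; the path "/some_link" and the H5.H5Lget_info(loc_id, name, lapl_id) call shape are assumptions chosen for illustration only.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.hdf5lib.structs.H5L_info_t;

    public class LinkInfoSketch {
        public static void main(String[] args) throws Exception {
            long fid = H5.H5Fopen("example.h5", HDF5Constants.H5F_ACC_RDONLY,
                                  HDF5Constants.H5P_DEFAULT);
            try {
                // Assumed call shape: H5Lget_info(loc_id, name, lapl_id).
                H5L_info_t info = H5.H5Lget_info(fid, "/some_link",
                                                 HDF5Constants.H5P_DEFAULT);
                System.out.println("link type: " + info.type);
                System.out.println("creation order valid: " + info.corder_valid);
            }
            finally {
                H5.H5Fclose(fid);
            }
        }
    }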
diff --git a/java/src/hdf/hdf5lib/structs/H5O_hdr_info_t.java b/java/src/hdf/hdf5lib/structs/H5O_hdr_info_t.java
new file mode 100644
index 0000000..86cf883
--- /dev/null
+++ b/java/src/hdf/hdf5lib/structs/H5O_hdr_info_t.java
@@ -0,0 +1,49 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+// Information struct for object header metadata (for H5Oget_info/H5Oget_info_by_name/H5Oget_info_by_idx)
+public class H5O_hdr_info_t implements Serializable{
+ private static final long serialVersionUID = 7883826382952577189L;
+ public int version; /* Version number of header format in file */
+ public int nmesgs; /* Number of object header messages */
+ public int nchunks; /* Number of object header chunks */
+ public int flags; /* Object header status flags */
+ public long space_total; /* Total space for storing object header in file */
+ public long space_meta; /* Space within header for object header metadata information */
+ public long space_mesg; /* Space within header for actual message information */
+ public long space_free; /* Free space within object header */
+ public long mesg_present; /* Flags to indicate presence of message type in header */
+ public long mesg_shared; /* Flags to indicate message type is shared in header */
+
+ H5O_hdr_info_t (int version, int nmesgs, int nchunks, int flags,
+ long space_total, long space_meta, long space_mesg, long space_free,
+ long mesg_present, long mesg_shared)
+ {
+ this.version = version;
+ this.nmesgs = nmesgs;
+ this.nchunks = nchunks;
+ this.flags = flags;
+ this.space_total = space_total;
+ this.space_meta = space_meta;
+ this.space_mesg = space_mesg;
+ this.space_free = space_free;
+ this.mesg_present = mesg_present;
+ this.mesg_shared = mesg_shared;
+ }
+}
diff --git a/java/src/hdf/hdf5lib/structs/H5O_info_t.java b/java/src/hdf/hdf5lib/structs/H5O_info_t.java
new file mode 100644
index 0000000..a89aaf0
--- /dev/null
+++ b/java/src/hdf/hdf5lib/structs/H5O_info_t.java
@@ -0,0 +1,54 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+//Information struct for object (for H5Oget_info/H5Oget_info_by_name/H5Oget_info_by_idx)
+public class H5O_info_t implements Serializable{
+ private static final long serialVersionUID = 4691681163544054518L;
+ public long fileno; /* File number that object is located in */
+ public long addr; /* Object address in file */
+ public int type; /* Basic object type (group, dataset, etc.) */
+ public int rc; /* Reference count of object */
+ public long atime; /* Access time */
+ public long mtime; /* Modification time */
+ public long ctime; /* Change time */
+ public long btime; /* Birth time */
+ public long num_attrs; /* # of attributes attached to object */
+ public H5O_hdr_info_t hdr; /* Object header information */
+ /* Extra metadata storage for obj & attributes */
+ public H5_ih_info_t meta_size_obj; /* v1/v2 B-tree & local/fractal heap for groups, B-tree for chunked datasets */
+ public H5_ih_info_t meta_size_attr; /* v2 B-tree & heap for attributes */
+
+ public H5O_info_t (long fileno, long addr, int type,
+ int rc, long num_attrs, long atime, long mtime, long ctime, long btime,
+ H5O_hdr_info_t hdr, H5_ih_info_t meta_size_obj, H5_ih_info_t meta_size_attr)
+ {
+ this.fileno = fileno;
+ this.addr = addr;
+ this.type = type;
+ this.rc = rc;
+ this.num_attrs = num_attrs;
+ this.atime = atime;
+ this.mtime = mtime;
+ this.ctime = ctime;
+ this.btime = btime;
+ this.hdr = hdr;
+ this.meta_size_obj = meta_size_obj;
+ this.meta_size_attr = meta_size_attr;
+ }
+}
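And a final sketch for object information; the object path and the H5.H5Oget_info_by_name(loc_id, name, lapl_id) call are assumptions for illustration, not a statement of the exact wrapper API.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.hdf5lib.structs.H5O_info_t;

    public class ObjectInfoSketch {
        public static void main(String[] args) throws Exception {
            long fid = H5.H5Fopen("example.h5", HDF5Constants.H5F_ACC_RDONLY,
                                  HDF5Constants.H5P_DEFAULT);
            try {
                // Assumed call shape: H5Oget_info_by_name(loc_id, name, lapl_id).
                H5O_info_t info = H5.H5Oget_info_by_name(fid, "/some_dataset",
                                                         HDF5Constants.H5P_DEFAULT);
                System.out.println("object type:     " + info.type);
                System.out.println("attribute count: " + info.num_attrs);
            }
            finally {
                H5.H5Fclose(fid);
            }
        }
    }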
diff --git a/java/src/hdf/hdf5lib/structs/H5_ih_info_t.java b/java/src/hdf/hdf5lib/structs/H5_ih_info_t.java
new file mode 100644
index 0000000..450f548
--- /dev/null
+++ b/java/src/hdf/hdf5lib/structs/H5_ih_info_t.java
@@ -0,0 +1,31 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+// Index/heap size information, used as a member of H5O_info_t and H5F_info2_t (for H5Oget_info/H5Fget_info)
+public class H5_ih_info_t implements Serializable {
+ private static final long serialVersionUID = -142238015615462707L;
+ public long index_size; /* btree and/or list */
+ public long heap_size;
+
+ H5_ih_info_t (long index_size, long heap_size)
+ {
+ this.index_size = index_size;
+ this.heap_size = heap_size;
+ }
+}
diff --git a/java/src/hdf/overview.html b/java/src/hdf/overview.html
new file mode 100644
index 0000000..39d75b7
--- /dev/null
+++ b/java/src/hdf/overview.html
@@ -0,0 +1,96 @@
+<body>
+
+<h1>Java HDF5 Interface (JHI5)</h1>
+
+<h2><u>What it is</u></h2>
+The <b>Java HDF5 Interface (JHI5)</b> is a Java package
+(<a href="../../hdf-java-html/javadocs/hdf/hdf5lib/package-summary.html">hdf.hdf5lib</a>)
+that "wraps around" the HDF5 library.
+<p>There are a large number of functions in the HDF5
+library (version 1.10), and some of them are not supported in JHI5. Most of
+the unsupported functions take C function pointers as arguments, which are
+not currently supported in JHI5.
+
+<center><table BORDER=1 COLS=1 WIDTH="71%" BGCOLOR="#dbeaf5" >
+<tr>
+<td>
+<center>Note: The JHI5 does not support HDF4 or earlier.</center>
+</td>
+</tr>
+</table></center>
+
+<p>The JHI5 may be used by any Java application that needs to access HDF5
+files. It is extremely important to emphasize that <i>this package is not
+a pure Java implementation of the HDF5 library.</i> The JHI5 calls the
+same HDF5 library that is used by C or FORTRAN programs. (Note that this
+product cannot be used in most network browsers because it accesses the
+local disk using native code.)
+<p>The Java HDF5 Interface consists of Java classes and a dynamically
+linked native library. The Java classes declare native methods, and the
+library contains C functions which implement the native methods. The C
+functions call the standard HDF5 library, which is linked as part of the
+same library on most platforms.
+<p>The central part of the JHI5 is the Java class <i>
+<a href="../../hdf-java-html/javadocs/hdf/hdf5lib/H5.html">hdf.hdf5lib.H5</a></i>.
+The <i>H5</i> class calls the standard (<i>i.e.</i>, "native" code) HDF5
+library, providing native methods for most of the HDF5 functions.
+
+<h3>
+<u>How to use it</u></h3>
+The JHI5 is used by Java classes to call the HDF5 library in order to
+create HDF5 files and to read and write data in existing HDF5 files.
+<p>For example, the HDF5 library has the function <b>H5Fopen</b> to open
+an HDF5 file. The Java interface is the class <i>
+<a href="../../hdf-java-html/javadocs/hdf/hdf5lib/H5.html">hdf.hdf5lib.H5</a></i>,
+which has a method:
+<pre><b>static native int H5Fopen(String filename, int flags, int access );</b></pre>
+The native method is implemented in C using the
+<a href="http://java.sun.com/javase/6/docs/technotes/guides/jni/index.html">Java
+Native Interface</a> (JNI). It is written something like the following:
+<pre><b>JNIEXPORT jint
+JNICALL Java_hdf_hdf5lib_H5_H5Fopen
+(
+&nbsp;JNIEnv *env,
+&nbsp;jclass class,
+&nbsp;jstring hdfFile,
+&nbsp;jint flags,
+&nbsp;jint access)
+&nbsp;{
+
+&nbsp;/* ...convert Java String to (char *) */
+
+&nbsp;/* call the HDF library */
+&nbsp;retVal = H5Fopen((char *)file, (unsigned)flags, (hid_t)access );
+
+&nbsp;/* ... */
+}</b></pre>
+This C function calls the HDF5 library and returns the result appropriately.
+<p>There is one native method for each HDF5 entry point (several hundred
+in all); their C implementations are compiled with the HDF5 library into a
+dynamically loaded library (<i>libhdf5_java</i>). Note that this library must
+be built for each platform.
+<p>To call the HDF5 <b><i>H5Fopen</i></b> function, a Java program would
+import the package <i><b>hdf.hdf5lib.*</b></i> and invoke the method
+on the class <b><i>H5</i></b>. The Java program would look something
+like this:
+<pre><b>import hdf.hdf5lib.*;
+
+{
+&nbsp;/* ... */
+
+&nbsp;try {
+&nbsp;file = H5.H5Fopen("myFile.hdf", flags, access );
+&nbsp;} catch (HDF5Exception ex) {
+&nbsp;//...
+&nbsp;}
+
+&nbsp;/* ... */
+}</b></pre>
+The <i><b>H5</b> </i>class automatically loads the native method implementations
+and the HDF5 library.
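+<p>For completeness, a compilable version of the fragment above might look like
+the following sketch. The file name is a placeholder, and the exact wrapper
+signatures (for example, HDF5 identifiers mapping to Java <i>long</i>) should
+be checked against the installed <i>hdf.hdf5lib.H5</i> javadoc rather than
+taken from this sketch.
+<pre><b>import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+
+public class OpenFileSketch {
+    public static void main(String[] args) {
+        long fid = -1;
+        try {
+            /* open read-only with the default access property list */
+            fid = H5.H5Fopen("myFile.hdf", HDF5Constants.H5F_ACC_RDONLY,
+                             HDF5Constants.H5P_DEFAULT);
+            /* ... work with the file ... */
+        }
+        catch (HDF5Exception ex) {
+            ex.printStackTrace();
+        }
+        finally {
+            if (fid != -1)
+                try { H5.H5Fclose(fid); } catch (HDF5Exception ex) {}
+        }
+    }
+}</b></pre>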
+
+<h3>
+<a NAME="DOWNLOAD"></a>To Obtain</h3>
+The JHI5 is included with the <a href="http://www.hdfgroup.org/HDF5/index.html">HDF5</a> library.
+
+</body>
diff --git a/java/src/jni/CMakeLists.txt b/java/src/jni/CMakeLists.txt
new file mode 100644
index 0000000..ef80072
--- /dev/null
+++ b/java/src/jni/CMakeLists.txt
@@ -0,0 +1,85 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDF5_JAVA_JNI C CXX)
+
+set (HDF5_JAVA_JNI_CSRCS
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/exceptionImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5aImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5Constants.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5dImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5eImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5fImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5gImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5iImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5Imp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5lImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5oImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5pImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5plImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5rImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5sImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5tImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5util.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5zImp.c
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/nativeData.c
+)
+
+set (HDF5_JAVA_JNI_CHDRS
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/exceptionImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5aImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5dImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5eImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5fImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5gImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5iImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5jni.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5lImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5oImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5pImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5plImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5rImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5sImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5tImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5util.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/h5zImp.h
+ ${HDF5_JAVA_JNI_SOURCE_DIR}/nativeData.h
+)
+
+INCLUDE_DIRECTORIES( ${HDF5_JAVA_JNI_SOURCE_DIR} )
+
+set (CMAKE_INCLUDE_CURRENT_DIR_IN_INTERFACE ON)
+
+########### JNI libraries always must be built shared ###############
+add_library (${HDF5_JAVA_JNI_LIB_TARGET} SHARED ${HDF5_JAVA_JNI_CSRCS} ${HDF5_JAVA_JNI_CHDRS})
+TARGET_C_PROPERTIES (${HDF5_JAVA_JNI_LIB_TARGET} SHARED " " " ")
+target_link_libraries (${HDF5_JAVA_JNI_LIB_TARGET} ${HDF5_LIB_TARGET})
+set_target_properties (${HDF5_JAVA_JNI_LIB_TARGET} PROPERTIES FOLDER libraries/jni)
+SET_GLOBAL_VARIABLE (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_JAVA_JNI_LIB_TARGET}")
+H5_SET_LIB_OPTIONS (${HDF5_JAVA_JNI_LIB_TARGET} ${HDF5_JAVA_JNI_LIB_NAME} SHARED ${HDF5_JAVA_PACKAGE_SOVERSION})
+if (WIN32)
+ get_filename_component (HDF5_JAVA_JNI_DLL_NAME ${HDF5_JAVA_JNI_LIB_TARGET} NAME_WE)
+ # message (STATUS "HDF5_JAVA_JNI_DLL_NAME: ${HDF5_JAVA_JNI_DLL_NAME}")
+ if (BUILD_TESTING)
+ add_custom_target (HDF5_JAVA_JNI-Test-Copy ALL
+ COMMAND ${CMAKE_COMMAND} -E copy_if_different ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${CMAKE_CFG_INTDIR}/${HDF5_JAVA_JNI_DLL_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX} ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${HDF5_JAVA_JNI_DLL_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}
+ COMMENT "Copying ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${CMAKE_CFG_INTDIR}/${HDF5_JAVA_JNI_DLL_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX} to ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/"
+ )
+ add_dependencies (HDF5_JAVA_JNI-Test-Copy ${HDF5_JAVA_JNI_LIB_TARGET})
+ endif (BUILD_TESTING)
+endif (WIN32)
+
+#-----------------------------------------------------------------------------
+# Add Target(s) to CMake Install for import into other projects
+#-----------------------------------------------------------------------------
+if (HDF5_EXPORTED_TARGETS)
+ INSTALL_TARGET_PDB (${HDF5_JAVA_JNI_LIB_TARGET} ${HDF5_INSTALL_BIN_DIR} libraries)
+ install (
+ TARGETS
+ ${HDF5_JAVA_JNI_LIB_TARGET}
+ EXPORT
+ ${HDF5_JAVA_EXPORTED_TARGETS}
+ LIBRARY DESTINATION ${HDF5_INSTALL_LIB_DIR} COMPONENT libraries
+ ARCHIVE DESTINATION ${HDF5_INSTALL_LIB_DIR} COMPONENT libraries
+ RUNTIME DESTINATION ${HDF5_INSTALL_LIB_DIR} COMPONENT libraries
+ FRAMEWORK DESTINATION ${HDF5_INSTALL_FWRK_DIR} COMPONENT libraries
+ )
+endif (HDF5_EXPORTED_TARGETS)
diff --git a/java/src/jni/Makefile.am b/java/src/jni/Makefile.am
new file mode 100644
index 0000000..4667407
--- /dev/null
+++ b/java/src/jni/Makefile.am
@@ -0,0 +1,44 @@
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+#
+# HDF5 Java native interface (JNI) Library Makefile(.in)
+#
+
+include $(top_srcdir)/config/commence.am
+include $(top_srcdir)/config/lt_vers.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+# Include src directory and JNI flags
+AM_CPPFLAGS+=-I$(top_srcdir)/src -I$(top_srcdir)/java/src/jni $(JNIFLAGS)
+
+# This is our main target
+lib_LTLIBRARIES=libhdf5_java.la
+
+# Add libtool numbers to the HDF5 Java (JNI) library (from config/lt_vers.am)
+libhdf5_java_la_LDFLAGS= -version-info $(LT_JAVA_VERS_INTERFACE):$(LT_JAVA_VERS_REVISION):$(LT_JAVA_VERS_AGE) $(AM_LDFLAGS)
+
+# Source files for the library
+libhdf5_java_la_SOURCES=exceptionImp.c h5Constants.c nativeData.c h5util.c h5Imp.c \
+ h5aImp.c h5dImp.c h5eImp.c h5fImp.c h5gImp.c h5iImp.c h5lImp.c h5oImp.c \
+ h5pImp.c h5plImp.c h5rImp.c h5sImp.c h5tImp.c h5zImp.c
+
+# HDF5 Java (JNI) library depends on HDF5 Library.
+libhdf5_java_la_LIBADD=$(LIBHDF5)
+
+include $(top_srcdir)/config/conclude.am
diff --git a/java/src/jni/exceptionImp.c b/java/src/jni/exceptionImp.c
new file mode 100644
index 0000000..8623ca9
--- /dev/null
+++ b/java/src/jni/exceptionImp.c
@@ -0,0 +1,414 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF5 library, see the HDF5 Documentation at:
+ *     http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/***********/
+/* Headers */
+/***********/
+
+#include "hdf5.h"
+#include <stdio.h>
+#include <stdlib.h>
+#include "jni.h"
+#include "h5jni.h"
+#include "exceptionImp.h"
+
+
+/*******************/
+/* Local Variables */
+/*******************/
+
+/* These types are copied from H5Eprivate.h
+ * They should be moved to a public include file, and deleted from
+ * here.
+ */
+
+#define H5E_NSLOTS 32 /*number of slots in an error stack */
+
+/*
+ * The list of error messages in the system is kept as an array of
+ * error_code/message pairs, one for major error numbers and another for
+ * minor error numbers.
+ */
+typedef struct H5E_major_mesg_t {
+ hid_t error_code;
+ const char *str;
+} H5E_major_mesg_t;
+
+typedef struct H5E_minor_mesg_t {
+ hid_t error_code;
+ const char *str;
+} H5E_minor_mesg_t;
+
+/* major and minor error numbers */
+typedef struct H5E_num_t {
+ hid_t maj_num;
+ hid_t min_num;
+} H5E_num_t;
+
+/********************/
+/* Local Macros */
+/********************/
+
+#define THROWEXCEPTION(className,args) { \
+ jclass jc; \
+ jmethodID jm; \
+ jobject ex; \
+ jc = ENVPTR->FindClass(ENVPAR (className)); \
+ if (jc == NULL) { \
+ return JNI_FALSE; \
+ } \
+ jm = ENVPTR->GetMethodID(ENVPAR jc, "<init>", "(Ljava/lang/String;)V"); \
+ if (jm == NULL) { \
+ return JNI_FALSE; \
+ } \
+ ex = ENVPTR->NewObjectA (ENVPAR jc, jm, (jvalue*)(args)); \
+ if (ENVPTR->Throw(ENVPAR (jthrowable)ex) < 0) { \
+ printf("FATAL ERROR: %s: Throw failed\n", (className)); \
+ return JNI_FALSE; \
+ } \
+ return JNI_TRUE; \
+}
+
+/********************/
+/* Local Prototypes */
+/********************/
+
+static const char *defineHDF5LibraryException(hid_t maj_num);
+static jboolean H5JNIErrorClass(JNIEnv *env, const char *message, const char *className);
+
+/* get the major and minor error numbers on the top of the error stack */
+static herr_t
+walk_error_callback(unsigned n, const H5E_error2_t *err_desc, void *_err_nums)
+{
+ H5E_num_t *err_nums = (H5E_num_t *)_err_nums;
+
+ if(err_desc) {
+ err_nums->maj_num = err_desc->maj_num;
+ err_nums->min_num = err_desc->min_num;
+ } /* end if */
+
+ return 0;
+} /* end walk_error_callback() */
+
+/*
+ * Class:     hdf_hdf5lib_H5
+ * Method: H5error_off
+ * Signature: ()I
+ *
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5error_1off(JNIEnv *env, jclass clss)
+{
+ H5Eset_auto2(H5E_DEFAULT, NULL, NULL);
+ return 0;
+} /* end Java_hdf_hdf5lib_H5_H5error_1off() */
+
+
+/*
+ * Class:     hdf_hdf5lib_exceptions_HDF5LibraryException
+ * Method: printStackTrace0
+ * Signature: (Ljava/lang/Object;)V
+ *
+ * Call the HDF-5 library to print the HDF-5 error stack to 'file_name'.
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_exceptions_HDF5LibraryException_printStackTrace0(
+ JNIEnv *env, jobject obj, jstring file_name)
+{
+ FILE *stream = NULL;
+ const char *file = NULL;
+
+ if(file_name == NULL) {
+ H5Eprint2(H5E_DEFAULT, stderr);
+ } /* end if */
+ else {
+ file = ENVPTR->GetStringUTFChars(ENVPAR file_name, 0);
+ stream = HDfopen(file, "a+");
+ if(stream) {
+ H5Eprint2(H5E_DEFAULT, stream);
+ HDfclose(stream);
+ } /* end if */
+ ENVPTR->ReleaseStringUTFChars(ENVPAR file_name, file);
+ } /* end else */
+} /* end Java_hdf_hdf5lib_exceptions_HDF5LibraryException_printStackTrace0() */
+
+/*
+ * Class:     hdf_hdf5lib_exceptions_HDF5LibraryException
+ * Method: getMajorErrorNumber
+ * Signature: ()J
+ *
+ * Extract the HDF-5 major error number from the HDF-5 error stack.
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_exceptions_HDF5LibraryException_getMajorErrorNumber(
+ JNIEnv *env, jobject obj)
+{
+ H5E_num_t err_nums;
+ err_nums.maj_num = 0;
+ err_nums.min_num = 0;
+
+ H5Ewalk2(H5E_DEFAULT, H5E_WALK_DOWNWARD, walk_error_callback, &err_nums);
+
+ return err_nums.maj_num;
+} /* end Java_hdf_hdf5lib_exceptions_HDF5LibraryException_getMajorErrorNumber() */
+
+/*
+ * Class:     hdf_hdf5lib_exceptions_HDF5LibraryException
+ * Method: getMinorErrorNumber
+ * Signature: ()J
+ *
+ * Extract the HDF-5 minor error number from the HDF-5 error stack.
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_exceptions_HDF5LibraryException_getMinorErrorNumber(
+ JNIEnv *env, jobject obj)
+{
+ H5E_num_t err_nums;
+ err_nums.maj_num = 0;
+ err_nums.min_num = 0;
+
+ H5Ewalk2(H5E_DEFAULT, H5E_WALK_DOWNWARD, walk_error_callback, &err_nums);
+
+ return err_nums.min_num;
+} /* end Java_hdf_hdf5lib_exceptions_HDF5LibraryException_getMinorErrorNumber() */
+
+/*
+ * Routine to raise particular Java exceptions from C
+ */
+static
+jboolean
+H5JNIErrorClass(JNIEnv *env, const char *message, const char *className)
+{
+ char *args[2];
+ jstring str = ENVPTR->NewStringUTF(ENVPAR message);
+ args[0] = (char *)str;
+ args[1] = 0;
+
+ THROWEXCEPTION(className, args);
+} /* end H5JNIErrorClass() */
+
+/*
+ * Create and throw an 'OutOfMemoryError'
+ *
+ * Note: This routine never returns from the 'throw',
+ * and the Java native method immediately raises the
+ * exception.
+ */
+jboolean
+h5outOfMemory(JNIEnv *env, const char *functName)
+{
+ return H5JNIErrorClass(env, functName, "java/lang/OutOfMemoryError");
+} /* end h5outOfMemory() */
+
+
+/*
+ * A fatal error in a JNI call
+ * Create and throw an 'InternalError'
+ *
+ * Note: This routine never returns from the 'throw',
+ * and the Java native method immediately raises the
+ * exception.
+ */
+jboolean
+h5JNIFatalError(JNIEnv *env, const char *functName)
+{
+ return H5JNIErrorClass(env, functName, "java/lang/InternalError");
+} /* end h5JNIFatalError() */
+
+/*
+ * A NULL argument in an HDF5 call
+ * Create and throw a 'NullPointerException'
+ *
+ * Note: This routine never returns from the 'throw',
+ * and the Java native method immediately raises the
+ * exception.
+ */
+jboolean
+h5nullArgument(JNIEnv *env, const char *functName)
+{
+ return H5JNIErrorClass(env, functName, "java/lang/NullPointerException");
+} /* end h5nullArgument() */
+
+/*
+ * A bad argument in an HDF5 call
+ * Create and throw an 'IllegalArgumentException'
+ *
+ * Note: This routine never returns from the 'throw',
+ * and the Java native method immediately raises the
+ * exception.
+ */
+jboolean
+h5badArgument(JNIEnv *env, const char *functName)
+{
+ return H5JNIErrorClass(env, functName, "java/lang/IllegalArgumentException");
+} /* end h5badArgument() */
+
+/*
+ * Some feature is not implemented yet
+ * Create and throw an 'UnsupportedOperationException'
+ *
+ * Note: This routine never returns from the 'throw',
+ * and the Java native method immediately raises the
+ * exception.
+ */
+jboolean
+h5unimplemented(JNIEnv *env, const char *functName)
+{
+ return H5JNIErrorClass(env, functName, "java/lang/UnsupportedOperationException");
+} /* end h5unimplemented() */
+
+/* h5raiseException(). This routine is called to generate
+ * an arbitrary Java exception with a particular message.
+ *
+ * Note: This routine never returns from the 'throw',
+ * and the Java native method immediately raises the
+ * exception.
+ */
+jboolean
+h5raiseException(JNIEnv *env, const char *exception, const char *message)
+{
+ return H5JNIErrorClass(env, message, exception);
+} /* end h5raiseException() */
+
+/*
+ * h5libraryError() determines the HDF-5 major error code
+ * and creates and throws the appropriate sub-class of
+ * HDF5LibraryException(). This routine should be called
+ * whenever a call to the HDF-5 library fails, i.e., when
+ * the return is -1.
+ *
+ * Note: This routine never returns from the 'throw',
+ * and the Java native method immediately raises the
+ * exception.
+ */
+jboolean
+h5libraryError(JNIEnv *env)
+{
+ char *args[2];
+ const char *exception = NULL;
+ char *msg_str = NULL;
+ int num_errs = 0;
+ hid_t min_num;
+ hid_t maj_num;
+ ssize_t msg_size = 0;
+ H5E_type_t error_msg_type;
+ jstring str = NULL;
+ hid_t stk_id = -1;
+ H5E_num_t exceptionNumbers;
+
+ exceptionNumbers.maj_num = 0;
+ exceptionNumbers.min_num = 0;
+
+ /* Save current stack contents for future use */
+ stk_id = H5Eget_current_stack(); /* This will clear current stack */
+ if(stk_id >= 0)
+ H5Ewalk2(stk_id, H5E_WALK_DOWNWARD, walk_error_callback, &exceptionNumbers);
+ maj_num = exceptionNumbers.maj_num;
+ min_num = exceptionNumbers.min_num;
+
+ exception = defineHDF5LibraryException(maj_num);
+
+ /* get the length of the name */
+ msg_size = H5Eget_msg(min_num, NULL, NULL, 0);
+ if(msg_size > 0) {
+ msg_size++; /* add extra space for the null terminator */
+ msg_str = (char*)HDcalloc((size_t)msg_size, sizeof(char));
+ if(msg_str) {
+ msg_size = H5Eget_msg(min_num, &error_msg_type, (char *)msg_str, (size_t)msg_size);
+ str = ENVPTR->NewStringUTF(ENVPAR msg_str);
+ HDfree(msg_str);
+ } /* end if */
+ } /* end if */
+ else
+ str = NULL;
+ if(stk_id >= 0)
+ H5Eset_current_stack(stk_id);
+
+ args[0] = (char *)str;
+ args[1] = 0;
+ THROWEXCEPTION(exception, args);
+} /* end h5libraryError() */
+
+
+/*
+ * defineHDF5LibraryException() returns the name of the sub-class
+ * which goes with an HDF-5 error code.
+ */
+static const char *
+defineHDF5LibraryException(hid_t maj_num)
+{
+ hid_t err_num = maj_num;
+
+ if (H5E_ARGS == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5FunctionArgumentException";
+ else if (H5E_RESOURCE == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5ResourceUnavailableException";
+ else if (H5E_INTERNAL == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5InternalErrorException";
+ else if (H5E_FILE == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5FileInterfaceException";
+ else if (H5E_IO == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5LowLevelIOException";
+ else if (H5E_FUNC == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5FunctionEntryExitException";
+ else if (H5E_ATOM == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5AtomException";
+ else if (H5E_CACHE == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5MetaDataCacheException";
+ else if (H5E_BTREE == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5BtreeException";
+ else if (H5E_SYM == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5SymbolTableException";
+ else if (H5E_HEAP == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5HeapException";
+ else if (H5E_OHDR == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5ObjectHeaderException";
+ else if (H5E_DATATYPE == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5DatatypeInterfaceException";
+ else if (H5E_DATASPACE == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5DataspaceInterfaceException";
+ else if (H5E_DATASET == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5DatasetInterfaceException";
+ else if (H5E_STORAGE == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5DataStorageException";
+ else if (H5E_PLIST == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5PropertyListInterfaceException";
+ else if (H5E_ATTR == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5AttributeException";
+ else if (H5E_PLINE == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5DataFiltersException";
+ else if (H5E_EFL == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5ExternalFileListException";
+ else if (H5E_REFERENCE == err_num)
+ return "hdf/hdf5lib/exceptions/HDF5ReferenceException";
+
+ return "hdf/hdf5lib/exceptions/HDF5LibraryException";
+} /* end defineHDF5LibraryException() */
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
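To show the effect of the code above from the Java side: when an HDF5 call fails, the thrown HDF5LibraryException (or the sub-class chosen by defineHDF5LibraryException()) carries the major/minor error numbers extracted by the error-stack walk. A minimal sketch, assuming the wrapper's getMajorErrorNumber()/getMinorErrorNumber() return long (matching the ()J signatures above) and using a deliberately non-existent placeholder file name:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.hdf5lib.exceptions.HDF5LibraryException;

    public class ErrorHandlingSketch {
        public static void main(String[] args) {
            try {
                // Opening a missing file makes the library fail; the JNI layer then
                // throws the matching HDF5LibraryException sub-class.
                H5.H5Fopen("does_not_exist.h5", HDF5Constants.H5F_ACC_RDONLY,
                           HDF5Constants.H5P_DEFAULT);
            }
            catch (HDF5LibraryException ex) {
                System.err.println("major error number: " + ex.getMajorErrorNumber());
                System.err.println("minor error number: " + ex.getMinorErrorNumber());
                ex.printStackTrace(); // may also print the native HDF5 error stack
            }
        }
    }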
diff --git a/java/src/jni/exceptionImp.h b/java/src/jni/exceptionImp.h
new file mode 100644
index 0000000..e238372
--- /dev/null
+++ b/java/src/jni/exceptionImp.h
@@ -0,0 +1,67 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_exception */
+
+#ifndef _Included_hdf_hdf5lib_H5_exception
+#define _Included_hdf_hdf5lib_H5_exception
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/*
+ * Class:     hdf_hdf5lib_H5
+ * Method: H5error_off
+ * Signature: ()I
+ *
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5error_1off(JNIEnv *env, jclass clss );
+
+
+/*
+ * Class:     hdf_hdf5lib_exceptions_HDF5LibraryException
+ * Method: printStackTrace0
+ * Signature: (Ljava/lang/Object;)V
+ *
+ * Call the HDF-5 library to print the HDF-5 error stack to 'file_name'.
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_exceptions_HDF5LibraryException_printStackTrace0(JNIEnv *env,
+ jobject obj, jstring file_name);
+
+/*
+ * Class:     hdf_hdf5lib_exceptions_HDF5LibraryException
+ * Method: getMajorErrorNumber
+ * Signature: ()J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_exceptions_HDF5LibraryException_getMajorErrorNumber(JNIEnv *env, jobject obj);
+
+/*
+ * Class:     hdf_hdf5lib_exceptions_HDF5LibraryException
+ * Method: getMinorErrorNumber
+ * Signature: ()J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_exceptions_HDF5LibraryException_getMinorErrorNumber(JNIEnv *env, jobject obj);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_exception */
diff --git a/java/src/jni/h5Constants.c b/java/src/jni/h5Constants.c
new file mode 100644
index 0000000..f1d49be
--- /dev/null
+++ b/java/src/jni/h5Constants.c
@@ -0,0 +1,692 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF5 library, see the HDF5 Documentation at:
+ *     http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include <jni.h>
+#include "hdf5.h"
+
+#pragma GCC diagnostic ignored "-Wmissing-prototypes"
+
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1QUARTER_1HADDR_1MAX(JNIEnv *env, jclass cls) { return (hsize_t)HADDR_MAX/4; }
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1SZIP_1ALLOW_1K13_1OPTION_1MASK(JNIEnv *env, jclass cls) { return H5_SZIP_ALLOW_K13_OPTION_MASK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1SZIP_1CHIP_1OPTION_1MASK(JNIEnv *env, jclass cls) { return H5_SZIP_CHIP_OPTION_MASK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1SZIP_1EC_1OPTION_1MASK(JNIEnv *env, jclass cls) { return H5_SZIP_EC_OPTION_MASK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1SZIP_1MAX_1PIXELS_1PER_1BLOCK(JNIEnv *env, jclass cls) { return H5_SZIP_MAX_PIXELS_PER_BLOCK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1SZIP_1NN_1OPTION_1MASK(JNIEnv *env, jclass cls) { return H5_SZIP_NN_OPTION_MASK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1INDEX_1UNKNOWN(JNIEnv *env, jclass cls) { return H5_INDEX_UNKNOWN; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1INDEX_1NAME(JNIEnv *env, jclass cls) { return H5_INDEX_NAME; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1INDEX_1CRT_1ORDER(JNIEnv *env, jclass cls) { return H5_INDEX_CRT_ORDER; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1INDEX_1N(JNIEnv *env, jclass cls) { return H5_INDEX_N; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1ITER_1UNKNOWN(JNIEnv *env, jclass cls) { return H5_ITER_UNKNOWN; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1ITER_1INC(JNIEnv *env, jclass cls) { return H5_ITER_INC; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1ITER_1DEC(JNIEnv *env, jclass cls) { return H5_ITER_DEC; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1ITER_1NATIVE(JNIEnv *env, jclass cls) { return H5_ITER_NATIVE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5_1ITER_1N(JNIEnv *env, jclass cls) { return H5_ITER_N; }
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5AC_1CURR_1CACHE_1CONFIG_1VERSION(JNIEnv *env, jclass cls) { return H5AC__CURR_CACHE_CONFIG_VERSION; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5AC_1MAX_1TRACE_1FILE_1NAME_1LEN(JNIEnv *env, jclass cls) { return H5AC__MAX_TRACE_FILE_NAME_LEN; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5AC_1METADATA_1WRITE_1STRATEGY_1PROCESS_1ZERO_1ONLY(JNIEnv *env, jclass cls) { return H5AC_METADATA_WRITE_STRATEGY__PROCESS_0_ONLY; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5AC_1METADATA_1WRITE_1STRATEGY_1DISTRIBUTED(JNIEnv *env, jclass cls) { return H5AC_METADATA_WRITE_STRATEGY__DISTRIBUTED; }
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5C_1incr_1off(JNIEnv *env, jclass cls) { return H5C_incr__off; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5C_1incr_1threshold(JNIEnv *env, jclass cls) { return H5C_incr__threshold; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5C_1flash_1incr_1off(JNIEnv *env, jclass cls) { return H5C_flash_incr__off; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5C_1flash_1incr_1add_1space(JNIEnv *env, jclass cls) { return H5C_flash_incr__add_space; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5C_1decr_1off(JNIEnv *env, jclass cls) { return H5C_decr__off; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5C_1decr_1threshold(JNIEnv *env, jclass cls) { return H5C_decr__threshold; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5C_1decr_1age_1out(JNIEnv *env, jclass cls) { return H5C_decr__age_out; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5C_1decr_1age_1out_1with_1threshold(JNIEnv *env, jclass cls) { return H5C_decr__age_out_with_threshold; }
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1CHUNK_1IDX_1BTREE(JNIEnv *env, jclass cls) { return H5D_CHUNK_IDX_BTREE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1ALLOC_1TIME_1DEFAULT(JNIEnv *env, jclass cls) { return H5D_ALLOC_TIME_DEFAULT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1ALLOC_1TIME_1EARLY(JNIEnv *env, jclass cls) { return H5D_ALLOC_TIME_EARLY; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1ALLOC_1TIME_1ERROR(JNIEnv *env, jclass cls) { return H5D_ALLOC_TIME_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1ALLOC_1TIME_1INCR(JNIEnv *env, jclass cls) { return H5D_ALLOC_TIME_INCR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1ALLOC_1TIME_1LATE(JNIEnv *env, jclass cls) { return H5D_ALLOC_TIME_LATE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1TIME_1ERROR(JNIEnv *env, jclass cls) { return H5D_FILL_TIME_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1TIME_1ALLOC(JNIEnv *env, jclass cls) { return H5D_FILL_TIME_ALLOC; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1TIME_1NEVER(JNIEnv *env, jclass cls) { return H5D_FILL_TIME_NEVER; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1TIME_1IFSET(JNIEnv *env, jclass cls) { return H5D_FILL_TIME_IFSET; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1VALUE_1DEFAULT(JNIEnv *env, jclass cls) { return H5D_FILL_VALUE_DEFAULT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1VALUE_1ERROR(JNIEnv *env, jclass cls) { return H5D_FILL_VALUE_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1VALUE_1UNDEFINED(JNIEnv *env, jclass cls) { return H5D_FILL_VALUE_UNDEFINED; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1FILL_1VALUE_1USER_1DEFINED(JNIEnv *env, jclass cls) { return H5D_FILL_VALUE_USER_DEFINED; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1LAYOUT_1ERROR(JNIEnv *env, jclass cls) { return H5D_LAYOUT_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1CHUNKED(JNIEnv *env, jclass cls) { return H5D_CHUNKED; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1COMPACT(JNIEnv *env, jclass cls) { return H5D_COMPACT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1CONTIGUOUS(JNIEnv *env, jclass cls) { return H5D_CONTIGUOUS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1VIRTUAL(JNIEnv *env, jclass cls) { return H5D_VIRTUAL; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1NLAYOUTS(JNIEnv *env, jclass cls) { return H5D_NLAYOUTS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1SPACE_1STATUS_1ALLOCATED(JNIEnv *env, jclass cls) { return H5D_SPACE_STATUS_ALLOCATED; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1SPACE_1STATUS_1ERROR(JNIEnv *env, jclass cls) { return H5D_SPACE_STATUS_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1SPACE_1STATUS_1NOT_1ALLOCATED(JNIEnv *env, jclass cls) { return H5D_SPACE_STATUS_NOT_ALLOCATED; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1SPACE_1STATUS_1PART_1ALLOCATED(JNIEnv *env, jclass cls) { return H5D_SPACE_STATUS_PART_ALLOCATED; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1VDS_1ERROR(JNIEnv *env, jclass cls) { return H5D_VDS_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1VDS_1FIRST_1MISSING(JNIEnv *env, jclass cls) { return H5D_VDS_FIRST_MISSING; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5D_1VDS_1LAST_1AVAILABLE(JNIEnv *env, jclass cls) { return H5D_VDS_LAST_AVAILABLE; }
+
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1ALIGNMENT(JNIEnv *env, jclass cls) { return H5E_ALIGNMENT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1ALREADYEXISTS(JNIEnv *env, jclass cls) { return H5E_ALREADYEXISTS; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1ALREADYINIT(JNIEnv *env, jclass cls) { return H5E_ALREADYINIT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1ARGS(JNIEnv *env, jclass cls) { return H5E_ARGS; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1ATOM(JNIEnv *env, jclass cls) { return H5E_ATOM; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1ATTR(JNIEnv *env, jclass cls) { return H5E_ATTR; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1BADATOM(JNIEnv *env, jclass cls) { return H5E_BADATOM; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1BADFILE(JNIEnv *env, jclass cls) { return H5E_BADFILE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1BADGROUP(JNIEnv *env, jclass cls) { return H5E_BADGROUP; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1BADMESG(JNIEnv *env, jclass cls) { return H5E_BADMESG; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1BADRANGE(JNIEnv *env, jclass cls) { return H5E_BADRANGE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1BADSELECT(JNIEnv *env, jclass cls) { return H5E_BADSELECT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1BADSIZE(JNIEnv *env, jclass cls) { return H5E_BADSIZE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1BADTYPE(JNIEnv *env, jclass cls) { return H5E_BADTYPE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1BADVALUE(JNIEnv *env, jclass cls) { return H5E_BADVALUE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1BTREE(JNIEnv *env, jclass cls) { return H5E_BTREE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CACHE(JNIEnv *env, jclass cls) { return H5E_CACHE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CALLBACK(JNIEnv *env, jclass cls) { return H5E_CALLBACK; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANAPPLY(JNIEnv *env, jclass cls) { return H5E_CANAPPLY; }
+/*JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTALLOC(JNIEnv *env, jclass cls) { return H5E_CANTALLOC; }*/
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTCLIP(JNIEnv *env, jclass cls) { return H5E_CANTCLIP; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTCLOSEFILE(JNIEnv *env, jclass cls) { return H5E_CANTCLOSEFILE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTCONVERT(JNIEnv *env, jclass cls) { return H5E_CANTCONVERT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTCOPY(JNIEnv *env, jclass cls) { return H5E_CANTCOPY; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTCOUNT(JNIEnv *env, jclass cls) { return H5E_CANTCOUNT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTCREATE(JNIEnv *env, jclass cls) { return H5E_CANTCREATE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTDEC(JNIEnv *env, jclass cls) { return H5E_CANTDEC; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTDECODE(JNIEnv *env, jclass cls) { return H5E_CANTDECODE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTDELETE(JNIEnv *env, jclass cls) { return H5E_CANTDELETE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTENCODE(JNIEnv *env, jclass cls) { return H5E_CANTENCODE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTFLUSH(JNIEnv *env, jclass cls) { return H5E_CANTFLUSH; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTFREE(JNIEnv *env, jclass cls) { return H5E_CANTFREE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTGET(JNIEnv *env, jclass cls) { return H5E_CANTGET; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTINC(JNIEnv *env, jclass cls) { return H5E_CANTINC; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTINIT(JNIEnv *env, jclass cls) { return H5E_CANTINIT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTINSERT(JNIEnv *env, jclass cls) { return H5E_CANTINSERT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTLIST(JNIEnv *env, jclass cls) { return H5E_CANTLIST; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTLOAD(JNIEnv *env, jclass cls) { return H5E_CANTLOAD; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTLOCK(JNIEnv *env, jclass cls) { return H5E_CANTLOCK; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTNEXT(JNIEnv *env, jclass cls) { return H5E_CANTNEXT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTOPENFILE(JNIEnv *env, jclass cls) { return H5E_CANTOPENFILE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTOPENOBJ(JNIEnv *env, jclass cls) { return H5E_CANTOPENOBJ; }
+/*JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTRECV(JNIEnv *env, jclass cls) { return H5E_CANTRECV; }*/
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTREGISTER(JNIEnv *env, jclass cls) { return H5E_CANTREGISTER; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTRELEASE(JNIEnv *env, jclass cls) { return H5E_CANTRELEASE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTSELECT(JNIEnv *env, jclass cls) { return H5E_CANTSELECT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTSET(JNIEnv *env, jclass cls) { return H5E_CANTSET; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTSPLIT(JNIEnv *env, jclass cls) { return H5E_CANTSPLIT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CANTUNLOCK(JNIEnv *env, jclass cls) { return H5E_CANTUNLOCK; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1CLOSEERROR(JNIEnv *env, jclass cls) { return H5E_CLOSEERROR; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1COMPLEN(JNIEnv *env, jclass cls) { return H5E_COMPLEN; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1DATASET(JNIEnv *env, jclass cls) { return H5E_DATASET; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1DATASPACE(JNIEnv *env, jclass cls) { return H5E_DATASPACE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1DATATYPE(JNIEnv *env, jclass cls) { return H5E_DATATYPE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1DEFAULT(JNIEnv *env, jclass cls) { return H5E_DEFAULT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1DUPCLASS(JNIEnv *env, jclass cls) { return H5E_DUPCLASS; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1EFL(JNIEnv *env, jclass cls) { return H5E_EFL; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1EXISTS(JNIEnv *env, jclass cls) { return H5E_EXISTS; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1FCNTL(JNIEnv *env, jclass cls) { return H5E_FCNTL; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1FILE(JNIEnv *env, jclass cls) { return H5E_FILE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1FILEEXISTS(JNIEnv *env, jclass cls) { return H5E_FILEEXISTS; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1FILEOPEN(JNIEnv *env, jclass cls) { return H5E_FILEOPEN; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1FUNC(JNIEnv *env, jclass cls) { return H5E_FUNC; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1HEAP(JNIEnv *env, jclass cls) { return H5E_HEAP; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1INTERNAL(JNIEnv *env, jclass cls) { return H5E_INTERNAL; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1IO(JNIEnv *env, jclass cls) { return H5E_IO; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1LINK(JNIEnv *env, jclass cls) { return H5E_LINK; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1LINKCOUNT(JNIEnv *env, jclass cls) { return H5E_LINKCOUNT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1MAJOR(JNIEnv *env, jclass cls) { return H5E_MAJOR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1MINOR(JNIEnv *env, jclass cls) { return H5E_MINOR; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1MOUNT(JNIEnv *env, jclass cls) { return H5E_MOUNT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1MPI(JNIEnv *env, jclass cls) { return H5E_MPI; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1MPIERRSTR(JNIEnv *env, jclass cls) { return H5E_MPIERRSTR; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1NOFILTER(JNIEnv *env, jclass cls) { return H5E_NOFILTER; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1NOIDS(JNIEnv *env, jclass cls) { return H5E_NOIDS; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1NONE_1MAJOR(JNIEnv *env, jclass cls) { return H5E_NONE_MAJOR; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1NONE_1MINOR(JNIEnv *env, jclass cls) { return H5E_NONE_MINOR; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1NOSPACE(JNIEnv *env, jclass cls) { return H5E_NOSPACE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1NOTCACHED(JNIEnv *env, jclass cls) { return H5E_NOTCACHED; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1NOTFOUND(JNIEnv *env, jclass cls) { return H5E_NOTFOUND; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1NOTHDF5(JNIEnv *env, jclass cls) { return H5E_NOTHDF5; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1OHDR(JNIEnv *env, jclass cls) { return H5E_OHDR; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1OVERFLOW(JNIEnv *env, jclass cls) { return H5E_OVERFLOW; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1PLINE(JNIEnv *env, jclass cls) { return H5E_PLINE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1PLIST(JNIEnv *env, jclass cls) { return H5E_PLIST; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1PROTECT(JNIEnv *env, jclass cls) { return H5E_PROTECT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1READERROR(JNIEnv *env, jclass cls) { return H5E_READERROR; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1REFERENCE(JNIEnv *env, jclass cls) { return H5E_REFERENCE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1RESOURCE(JNIEnv *env, jclass cls) { return H5E_RESOURCE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1RS(JNIEnv *env, jclass cls) { return H5E_RS; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1SEEKERROR(JNIEnv *env, jclass cls) { return H5E_SEEKERROR; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1SETLOCAL(JNIEnv *env, jclass cls) { return H5E_SETLOCAL; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1STORAGE(JNIEnv *env, jclass cls) { return H5E_STORAGE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1SYM(JNIEnv *env, jclass cls) { return H5E_SYM; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1TRUNCATED(JNIEnv *env, jclass cls) { return H5E_TRUNCATED; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1TST(JNIEnv *env, jclass cls) { return H5E_TST; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1UNINITIALIZED(JNIEnv *env, jclass cls) { return H5E_UNINITIALIZED; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1UNSUPPORTED(JNIEnv *env, jclass cls) { return H5E_UNSUPPORTED; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1VERSION(JNIEnv *env, jclass cls) { return H5E_VERSION; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1VFL(JNIEnv *env, jclass cls) { return H5E_VFL; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1WALK_1DOWNWARD(JNIEnv *env, jclass cls) { return H5E_WALK_DOWNWARD; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1WALK_1UPWARD(JNIEnv *env, jclass cls) { return H5E_WALK_UPWARD; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5E_1WRITEERROR(JNIEnv *env, jclass cls) { return H5E_WRITEERROR; }
+
+/* Java does not have unsigned native types */
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wsign-conversion"
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1ACC_1CREAT(JNIEnv *env, jclass cls) { return H5F_ACC_CREAT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1ACC_1EXCL(JNIEnv *env, jclass cls) { return H5F_ACC_EXCL; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1ACC_1RDONLY(JNIEnv *env, jclass cls) { return H5F_ACC_RDONLY; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1ACC_1RDWR(JNIEnv *env, jclass cls) { return H5F_ACC_RDWR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1ACC_1TRUNC(JNIEnv *env, jclass cls) { return H5F_ACC_TRUNC; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1ACC_1DEFAULT(JNIEnv *env, jclass cls) { return H5F_ACC_DEFAULT; }
+#pragma GCC diagnostic pop
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1CLOSE_1DEFAULT(JNIEnv *env, jclass cls) { return H5F_CLOSE_DEFAULT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1CLOSE_1SEMI(JNIEnv *env, jclass cls) { return H5F_CLOSE_SEMI; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1CLOSE_1STRONG(JNIEnv *env, jclass cls) { return H5F_CLOSE_STRONG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1CLOSE_1WEAK(JNIEnv *env, jclass cls) { return H5F_CLOSE_WEAK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1LIBVER_1EARLIEST(JNIEnv *env, jclass cls){return H5F_LIBVER_EARLIEST;}
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1LIBVER_1LATEST(JNIEnv *env, jclass cls){return H5F_LIBVER_LATEST;}
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1OBJ_1ALL(JNIEnv *env, jclass cls) { return H5F_OBJ_ALL; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1OBJ_1ATTR(JNIEnv *env, jclass cls) { return H5F_OBJ_ATTR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1OBJ_1DATASET(JNIEnv *env, jclass cls) { return H5F_OBJ_DATASET; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1OBJ_1DATATYPE(JNIEnv *env, jclass cls) { return H5F_OBJ_DATATYPE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1OBJ_1FILE(JNIEnv *env, jclass cls) { return H5F_OBJ_FILE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1OBJ_1GROUP(JNIEnv *env, jclass cls) { return H5F_OBJ_GROUP; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1OBJ_1LOCAL(JNIEnv *env, jclass cls) { return H5F_OBJ_LOCAL; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1SCOPE_1GLOBAL(JNIEnv *env, jclass cls) { return H5F_SCOPE_GLOBAL; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1SCOPE_1LOCAL(JNIEnv *env, jclass cls) { return H5F_SCOPE_LOCAL; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1UNLIMITED(JNIEnv *env, jclass cls) { return (jint)H5F_UNLIMITED; }
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1FILE_1SPACE_1DEFAULT(JNIEnv *env, jclass cls) { return H5F_FILE_SPACE_DEFAULT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1FILE_1SPACE_1ALL_1PERSIST(JNIEnv *env, jclass cls) { return H5F_FILE_SPACE_ALL_PERSIST; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1FILE_1SPACE_1ALL(JNIEnv *env, jclass cls) { return H5F_FILE_SPACE_ALL; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1FILE_1SPACE_1AGGR_1VFD(JNIEnv *env, jclass cls) { return H5F_FILE_SPACE_AGGR_VFD; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1FILE_1SPACE_1VFD(JNIEnv *env, jclass cls) { return H5F_FILE_SPACE_VFD; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5F_1FILE_1SPACE_1NTYPES(JNIEnv *env, jclass cls) { return H5F_FILE_SPACE_NTYPES; }
+
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1CORE(JNIEnv *env, jclass cls) { return H5FD_CORE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1DIRECT(JNIEnv *env, jclass cls) {
+#ifdef H5_HAVE_DIRECT
+ return H5FD_DIRECT;
+#else
+ return -1;
+#endif
+}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1FAMILY(JNIEnv *env, jclass cls) { return H5FD_FAMILY; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG(JNIEnv *env, jclass cls) { return H5FD_LOG; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MPIO(JNIEnv *env, jclass cls) { return H5FD_MPIO; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MULTI(JNIEnv *env, jclass cls) { return H5FD_MULTI; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1SEC2(JNIEnv *env, jclass cls) { return H5FD_SEC2; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1STDIO(JNIEnv *env, jclass cls) { return H5FD_STDIO; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1WINDOWS(JNIEnv *env, jclass cls) {
+#ifdef H5_HAVE_WINDOWS
+    return H5FD_WINDOWS;
+#else
+ return -1;
+#endif
+}
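+/* Drivers that are only present in some builds (H5FD_DIRECT, H5FD_WINDOWS) return -1
+ * when the library was configured without them, so Java code can probe for a driver
+ * before trying to use it. Illustrative sketch (not part of this file):
+ *
+ *     boolean haveDirect = HDF5Constants.H5FD_DIRECT >= 0;
+ */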
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1LOC_1READ(JNIEnv *env, jclass cls) { return H5FD_LOG_LOC_READ; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1LOC_1WRITE(JNIEnv *env, jclass cls) { return H5FD_LOG_LOC_WRITE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1LOC_1SEEK(JNIEnv *env, jclass cls) { return H5FD_LOG_LOC_SEEK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1LOC_1IO(JNIEnv *env, jclass cls) { return H5FD_LOG_LOC_IO; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1FILE_1READ(JNIEnv *env, jclass cls) { return H5FD_LOG_FILE_READ; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1FILE_1WRITE(JNIEnv *env, jclass cls) { return H5FD_LOG_FILE_WRITE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1FILE_1IO(JNIEnv *env, jclass cls) { return H5FD_LOG_FILE_IO; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1FLAVOR(JNIEnv *env, jclass cls) { return H5FD_LOG_FLAVOR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1NUM_1READ(JNIEnv *env, jclass cls) { return H5FD_LOG_NUM_READ; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1NUM_1WRITE(JNIEnv *env, jclass cls) { return H5FD_LOG_NUM_WRITE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1NUM_1SEEK(JNIEnv *env, jclass cls) { return H5FD_LOG_NUM_SEEK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1NUM_1TRUNCATE(JNIEnv *env, jclass cls) { return H5FD_LOG_NUM_TRUNCATE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1NUM_1IO(JNIEnv *env, jclass cls) { return H5FD_LOG_NUM_IO; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1TIME_1OPEN(JNIEnv *env, jclass cls) { return H5FD_LOG_TIME_OPEN; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1TIME_1STAT(JNIEnv *env, jclass cls) { return H5FD_LOG_TIME_STAT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1TIME_1READ(JNIEnv *env, jclass cls) { return H5FD_LOG_TIME_READ; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1TIME_1WRITE(JNIEnv *env, jclass cls) { return H5FD_LOG_TIME_WRITE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1TIME_1SEEK(JNIEnv *env, jclass cls) { return H5FD_LOG_TIME_SEEK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1TIME_1CLOSE(JNIEnv *env, jclass cls) { return H5FD_LOG_TIME_CLOSE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1TIME_1IO(JNIEnv *env, jclass cls) { return H5FD_LOG_TIME_IO; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1ALLOC(JNIEnv *env, jclass cls) { return H5FD_LOG_ALLOC; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG_1ALL(JNIEnv *env, jclass cls) { return H5FD_LOG_ALL; }
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1NOLIST(JNIEnv *env, jclass cls) { return H5FD_MEM_NOLIST; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1DEFAULT(JNIEnv *env, jclass cls) { return H5FD_MEM_DEFAULT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1SUPER(JNIEnv *env, jclass cls) { return H5FD_MEM_SUPER; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1BTREE(JNIEnv *env, jclass cls) { return H5FD_MEM_BTREE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1DRAW(JNIEnv *env, jclass cls) { return H5FD_MEM_DRAW; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1GHEAP(JNIEnv *env, jclass cls) { return H5FD_MEM_GHEAP; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1LHEAP(JNIEnv *env, jclass cls) { return H5FD_MEM_LHEAP; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1OHDR(JNIEnv *env, jclass cls) { return H5FD_MEM_OHDR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1NTYPES(JNIEnv *env, jclass cls) { return H5FD_MEM_NTYPES; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1DEFAULT_1HADDR_1SIZE(JNIEnv *env, jclass cls) { return (hsize_t)(HADDR_MAX/H5FD_MEM_NTYPES); }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1DEFAULT_1SIZE(JNIEnv *env, jclass cls) { return (hsize_t)0; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1DEFAULT_1SUPER_1SIZE(JNIEnv *env, jclass cls) { return (hsize_t)0; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1DEFAULT_1BTREE_1SIZE(JNIEnv *env, jclass cls) { return (hsize_t)(1 * (HADDR_MAX / (H5FD_MEM_NTYPES-1))); }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1DEFAULT_1DRAW_1SIZE(JNIEnv *env, jclass cls) { return (hsize_t)(2 * (HADDR_MAX / (H5FD_MEM_NTYPES-1))); }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1DEFAULT_1GHEAP_1SIZE(JNIEnv *env, jclass cls) { return (hsize_t)(3 * (HADDR_MAX / (H5FD_MEM_NTYPES-1))); }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1DEFAULT_1LHEAP_1SIZE(JNIEnv *env, jclass cls) { return (hsize_t)(4 * (HADDR_MAX / (H5FD_MEM_NTYPES-1))); }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MEM_1DEFAULT_1OHDR_1SIZE(JNIEnv *env, jclass cls) { return (hsize_t)(5 * (HADDR_MAX / (H5FD_MEM_NTYPES-1))); }
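+/* The H5FD_MEM_DEFAULT_*_SIZE values above split the address space evenly among the
+ * H5FD_MEM_NTYPES-1 real memory types: with slice = HADDR_MAX / (H5FD_MEM_NTYPES - 1),
+ * SUPER starts at 0, BTREE at 1*slice, DRAW at 2*slice, and so on up to OHDR at
+ * 5*slice. They appear to be intended as default address-map (memb_addr) entries for
+ * the multi VFD on the Java side. */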
+
+/* Symbols defined for compatibility with previous versions of the HDF5 API.
+ *
+ * Use of these symbols is deprecated.
+ */
+#ifndef H5_NO_DEPRECATED_SYMBOLS
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1DATASET(JNIEnv *env, jclass cls) { return H5G_DATASET; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1GROUP(JNIEnv *env, jclass cls) { return H5G_GROUP; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1LINK(JNIEnv *env, jclass cls) { return H5G_LINK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1UDLINK(JNIEnv *env, jclass cls) { return H5G_UDLINK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1RESERVED_15(JNIEnv *env, jclass cls) { return H5G_RESERVED_5; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1RESERVED_16(JNIEnv *env, jclass cls) { return H5G_RESERVED_6; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1RESERVED_17(JNIEnv *env, jclass cls) { return H5G_RESERVED_7; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1TYPE(JNIEnv *env, jclass cls) { return H5G_TYPE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1UNKNOWN(JNIEnv *env, jclass cls) { return H5G_UNKNOWN; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1LINK_1ERROR(JNIEnv *env, jclass cls) { return H5G_LINK_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1LINK_1HARD(JNIEnv *env, jclass cls) { return H5G_LINK_HARD; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1LINK_1SOFT(JNIEnv *env, jclass cls) { return H5G_LINK_SOFT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1NLIBTYPES(JNIEnv *env, jclass cls) { return H5G_NLIBTYPES; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1NTYPES(JNIEnv *env, jclass cls) { return H5G_NTYPES; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1NUSERTYPES(JNIEnv *env, jclass cls) { return H5G_NUSERTYPES; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1SAME_1LOC(JNIEnv *env, jclass cls) { return H5G_SAME_LOC; }
+
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1STORAGE_1TYPE_1UNKNOWN(JNIEnv *env, jclass cls){ return H5G_STORAGE_TYPE_UNKNOWN; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1STORAGE_1TYPE_1SYMBOL_1TABLE(JNIEnv *env, jclass cls){ return H5G_STORAGE_TYPE_SYMBOL_TABLE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1STORAGE_1TYPE_1COMPACT(JNIEnv *env, jclass cls){ return H5G_STORAGE_TYPE_COMPACT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5G_1STORAGE_1TYPE_1DENSE(JNIEnv *env, jclass cls){ return H5G_STORAGE_TYPE_DENSE; }
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1UNINIT(JNIEnv *env, jclass cls) { return H5I_UNINIT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1BADID(JNIEnv *env, jclass cls) { return H5I_BADID; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1FILE(JNIEnv *env, jclass cls) { return H5I_FILE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1GROUP(JNIEnv *env, jclass cls) { return H5I_GROUP; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1DATATYPE(JNIEnv *env, jclass cls) { return H5I_DATATYPE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1DATASPACE(JNIEnv *env, jclass cls) { return H5I_DATASPACE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1DATASET(JNIEnv *env, jclass cls) { return H5I_DATASET; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1ATTR(JNIEnv *env, jclass cls) { return H5I_ATTR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1REFERENCE(JNIEnv *env, jclass cls) { return H5I_REFERENCE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1VFL(JNIEnv *env, jclass cls) { return H5I_VFL; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1INVALID_1HID(JNIEnv *env, jclass cls) { return H5I_INVALID_HID; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1GENPROP_1CLS(JNIEnv *env, jclass cls) { return H5I_GENPROP_CLS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1GENPROP_1LST(JNIEnv *env, jclass cls) { return H5I_GENPROP_LST; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1ERROR_1CLASS(JNIEnv *env, jclass cls) { return H5I_ERROR_CLASS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1ERROR_1MSG(JNIEnv *env, jclass cls) { return H5I_ERROR_MSG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1ERROR_1STACK(JNIEnv *env, jclass cls) { return H5I_ERROR_STACK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5I_1NTYPES(JNIEnv *env, jclass cls) { return H5I_NTYPES; }
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5L_1TYPE_1ERROR(JNIEnv *env, jclass cls) { return H5L_TYPE_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5L_1TYPE_1HARD(JNIEnv *env, jclass cls) { return H5L_TYPE_HARD; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5L_1TYPE_1SOFT(JNIEnv *env, jclass cls) { return H5L_TYPE_SOFT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5L_1TYPE_1EXTERNAL(JNIEnv *env, jclass cls) { return H5L_TYPE_EXTERNAL; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5L_1TYPE_1MAX(JNIEnv *env, jclass cls) { return H5L_TYPE_MAX; }
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1COPY_1SHALLOW_1HIERARCHY_1FLAG(JNIEnv *env, jclass cls){return H5O_COPY_SHALLOW_HIERARCHY_FLAG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1COPY_1EXPAND_1SOFT_1LINK_1FLAG(JNIEnv *env, jclass cls){return H5O_COPY_EXPAND_SOFT_LINK_FLAG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1COPY_1EXPAND_1EXT_1LINK_1FLAG(JNIEnv *env, jclass cls){return H5O_COPY_EXPAND_EXT_LINK_FLAG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1COPY_1EXPAND_1REFERENCE_1FLAG(JNIEnv *env, jclass cls){return H5O_COPY_EXPAND_REFERENCE_FLAG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1COPY_1WITHOUT_1ATTR_1FLAG(JNIEnv *env, jclass cls){return H5O_COPY_WITHOUT_ATTR_FLAG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1COPY_1PRESERVE_1NULL_1FLAG(JNIEnv *env, jclass cls){return H5O_COPY_PRESERVE_NULL_FLAG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1SHMESG_1NONE_1FLAG(JNIEnv *env, jclass cls){return H5O_SHMESG_NONE_FLAG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1SHMESG_1SDSPACE_1FLAG(JNIEnv *env, jclass cls){return H5O_SHMESG_SDSPACE_FLAG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1SHMESG_1DTYPE_1FLAG(JNIEnv *env, jclass cls){return H5O_SHMESG_DTYPE_FLAG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1SHMESG_1FILL_1FLAG(JNIEnv *env, jclass cls){return H5O_SHMESG_FILL_FLAG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1SHMESG_1PLINE_1FLAG(JNIEnv *env, jclass cls){return H5O_SHMESG_PLINE_FLAG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1SHMESG_1ATTR_1FLAG(JNIEnv *env, jclass cls){return H5O_SHMESG_ATTR_FLAG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1SHMESG_1ALL_1FLAG(JNIEnv *env, jclass cls){return H5O_SHMESG_ALL_FLAG; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1TYPE_1UNKNOWN(JNIEnv *env, jclass cls) { return H5O_TYPE_UNKNOWN; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1TYPE_1GROUP(JNIEnv *env, jclass cls) { return H5O_TYPE_GROUP; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1TYPE_1DATASET(JNIEnv *env, jclass cls) { return H5O_TYPE_DATASET; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1TYPE_1NAMED_1DATATYPE(JNIEnv *env, jclass cls) { return H5O_TYPE_NAMED_DATATYPE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5O_1TYPE_1NTYPES(JNIEnv *env, jclass cls) { return H5O_TYPE_NTYPES; }
+
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1ROOT(JNIEnv *env, jclass cls){return H5P_ROOT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1OBJECT_1CREATE(JNIEnv *env, jclass cls){return H5P_OBJECT_CREATE;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1FILE_1CREATE(JNIEnv *env, jclass cls){return H5P_FILE_CREATE;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1FILE_1ACCESS(JNIEnv *env, jclass cls){return H5P_FILE_ACCESS;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1DATASET_1CREATE(JNIEnv *env, jclass cls){return H5P_DATASET_CREATE;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1DATASET_1ACCESS(JNIEnv *env, jclass cls){return H5P_DATASET_ACCESS;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1DATASET_1XFER(JNIEnv *env, jclass cls){return H5P_DATASET_XFER;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1FILE_1MOUNT(JNIEnv *env, jclass cls){return H5P_FILE_MOUNT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1GROUP_1CREATE(JNIEnv *env, jclass cls){return H5P_GROUP_CREATE;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1GROUP_1ACCESS(JNIEnv *env, jclass cls){return H5P_GROUP_ACCESS;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1DATATYPE_1CREATE(JNIEnv *env, jclass cls){return H5P_DATATYPE_CREATE;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1DATATYPE_1ACCESS(JNIEnv *env, jclass cls){return H5P_DATATYPE_ACCESS;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1STRING_1CREATE(JNIEnv *env, jclass cls){return H5P_STRING_CREATE;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1ATTRIBUTE_1CREATE(JNIEnv *env, jclass cls){return H5P_ATTRIBUTE_CREATE;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1ATTRIBUTE_1ACCESS(JNIEnv *env, jclass cls){return H5P_ATTRIBUTE_ACCESS;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1OBJECT_1COPY(JNIEnv *env, jclass cls){return H5P_OBJECT_COPY;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1LINK_1CREATE(JNIEnv *env, jclass cls){return H5P_LINK_CREATE;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1LINK_1ACCESS(JNIEnv *env, jclass cls){return H5P_LINK_ACCESS;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1FILE_1CREATE_1DEFAULT(JNIEnv *env, jclass cls){return H5P_FILE_CREATE_DEFAULT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1FILE_1ACCESS_1DEFAULT(JNIEnv *env, jclass cls){return H5P_FILE_ACCESS_DEFAULT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1DATASET_1CREATE_1DEFAULT(JNIEnv *env, jclass cls){return H5P_DATASET_CREATE_DEFAULT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1DATASET_1ACCESS_1DEFAULT(JNIEnv *env, jclass cls){return H5P_DATASET_ACCESS_DEFAULT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1DATASET_1XFER_1DEFAULT(JNIEnv *env, jclass cls){return H5P_DATASET_XFER_DEFAULT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1FILE_1MOUNT_1DEFAULT(JNIEnv *env, jclass cls){return H5P_FILE_MOUNT_DEFAULT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1GROUP_1CREATE_1DEFAULT(JNIEnv *env, jclass cls){return H5P_GROUP_CREATE_DEFAULT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1GROUP_1ACCESS_1DEFAULT(JNIEnv *env, jclass cls){return H5P_GROUP_ACCESS_DEFAULT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1DATATYPE_1CREATE_1DEFAULT(JNIEnv *env, jclass cls){return H5P_DATATYPE_CREATE_DEFAULT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1DATATYPE_1ACCESS_1DEFAULT(JNIEnv *env, jclass cls){return H5P_DATATYPE_ACCESS_DEFAULT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1ATTRIBUTE_1CREATE_1DEFAULT(JNIEnv *env, jclass cls){return H5P_ATTRIBUTE_CREATE_DEFAULT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1ATTRIBUTE_1ACCESS_1DEFAULT(JNIEnv *env, jclass cls){return H5P_ATTRIBUTE_ACCESS_DEFAULT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1OBJECT_1COPY_1DEFAULT(JNIEnv *env, jclass cls){return H5P_OBJECT_COPY_DEFAULT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1LINK_1CREATE_1DEFAULT(JNIEnv *env, jclass cls){return H5P_LINK_CREATE_DEFAULT;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1LINK_1ACCESS_1DEFAULT(JNIEnv *env, jclass cls){return H5P_LINK_ACCESS_DEFAULT;}
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1CRT_1ORDER_1TRACKED(JNIEnv *env, jclass cls){return H5P_CRT_ORDER_TRACKED;}
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1CRT_1ORDER_1INDEXED(JNIEnv *env, jclass cls){return H5P_CRT_ORDER_INDEXED;}
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1DEFAULT(JNIEnv *env, jclass cls) { return H5P_DEFAULT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5P_1NO_1CLASS(JNIEnv *env, jclass cls) { return H5P_ROOT; }
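+/* H5P_NO_CLASS is the older name for H5P_ROOT, so both wrappers return the same class id. */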
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5PL_1TYPE_1ERROR(JNIEnv *env, jclass cls) { return H5PL_TYPE_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5PL_1TYPE_1FILTER(JNIEnv *env, jclass cls) { return H5PL_TYPE_FILTER; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5PL_1FILTER_1PLUGIN(JNIEnv *env, jclass cls) { return H5PL_FILTER_PLUGIN; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5PL_1ALL_1PLUGIN(JNIEnv *env, jclass cls) { return H5PL_ALL_PLUGIN; }
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5R_1BADTYPE(JNIEnv *env, jclass cls) { return H5R_BADTYPE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5R_1MAXTYPE(JNIEnv *env, jclass cls) { return H5R_MAXTYPE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5R_1OBJ_1REF_1BUF_1SIZE(JNIEnv *env, jclass cls) { return H5R_OBJ_REF_BUF_SIZE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5R_1DSET_1REG_1REF_1BUF_1SIZE(JNIEnv *env, jclass cls) { return H5R_DSET_REG_REF_BUF_SIZE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5R_1OBJECT(JNIEnv *env, jclass cls) { return H5R_OBJECT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5R_1DATASET_1REGION(JNIEnv *env, jclass cls) { return H5R_DATASET_REGION; }
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1ALL(JNIEnv *env, jclass cls) { return H5S_ALL; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1MAX_1RANK(JNIEnv *env, jclass cls) { return H5S_MAX_RANK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1NO_1CLASS(JNIEnv *env, jclass cls) { return H5S_NO_CLASS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1NULL(JNIEnv *env, jclass cls) { return H5S_NULL; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SCALAR(JNIEnv *env, jclass cls) { return H5S_SCALAR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SEL_1ALL(JNIEnv *env, jclass cls) { return H5S_SEL_ALL; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SEL_1ERROR(JNIEnv *env, jclass cls) { return H5S_SEL_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SEL_1HYPERSLABS(JNIEnv *env, jclass cls) { return H5S_SEL_HYPERSLABS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SEL_1N(JNIEnv *env, jclass cls) { return H5S_SEL_N; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SEL_1NONE(JNIEnv *env, jclass cls) { return H5S_SEL_NONE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SEL_1POINTS(JNIEnv *env, jclass cls) { return H5S_SEL_POINTS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1AND(JNIEnv *env, jclass cls) { return H5S_SELECT_AND; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1APPEND(JNIEnv *env, jclass cls) { return H5S_SELECT_APPEND; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1INVALID(JNIEnv *env, jclass cls) { return H5S_SELECT_INVALID; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1NOOP(JNIEnv *env, jclass cls) { return H5S_SELECT_NOOP; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1NOTA(JNIEnv *env, jclass cls) { return H5S_SELECT_NOTA; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1NOTB(JNIEnv *env, jclass cls) { return H5S_SELECT_NOTB; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1OR(JNIEnv *env, jclass cls) { return H5S_SELECT_OR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1PREPEND(JNIEnv *env, jclass cls) { return H5S_SELECT_PREPEND; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1SET(JNIEnv *env, jclass cls) { return H5S_SELECT_SET; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SELECT_1XOR(JNIEnv *env, jclass cls) { return H5S_SELECT_XOR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1SIMPLE(JNIEnv *env, jclass cls) { return H5S_SIMPLE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5S_1UNLIMITED(JNIEnv *env, jclass cls) { return (jint)H5S_UNLIMITED; }
+
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1B16(JNIEnv *env, jclass cls) { return H5T_ALPHA_B16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1B32(JNIEnv *env, jclass cls) { return H5T_ALPHA_B32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1B64(JNIEnv *env, jclass cls) { return H5T_ALPHA_B64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1B8(JNIEnv *env, jclass cls) { return H5T_ALPHA_B8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1F32(JNIEnv *env, jclass cls) { return H5T_ALPHA_F32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1F64(JNIEnv *env, jclass cls) { return H5T_ALPHA_F64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1I16(JNIEnv *env, jclass cls) { return H5T_ALPHA_I16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1I32(JNIEnv *env, jclass cls) { return H5T_ALPHA_I32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1I64(JNIEnv *env, jclass cls) { return H5T_ALPHA_I64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1I8(JNIEnv *env, jclass cls) { return H5T_ALPHA_I8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1U16(JNIEnv *env, jclass cls) { return H5T_ALPHA_U16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1U32(JNIEnv *env, jclass cls) { return H5T_ALPHA_U32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1U64(JNIEnv *env, jclass cls) { return H5T_ALPHA_U64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ALPHA_1U8(JNIEnv *env, jclass cls) { return H5T_ALPHA_U8; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ARRAY(JNIEnv *env, jclass cls) { return H5T_ARRAY; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1BITFIELD(JNIEnv *env, jclass cls) { return H5T_BITFIELD; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1BKG_1NO(JNIEnv *env, jclass cls) { return H5T_BKG_NO; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1BKG_1YES(JNIEnv *env, jclass cls) { return H5T_BKG_YES; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1C_1S1(JNIEnv *env, jclass cls) { return H5T_C_S1; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1COMPOUND(JNIEnv *env, jclass cls) { return H5T_COMPOUND; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CONV_1CONV(JNIEnv *env, jclass cls) { return H5T_CONV_CONV; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CONV_1FREE(JNIEnv *env, jclass cls) { return H5T_CONV_FREE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CONV_1INIT(JNIEnv *env, jclass cls) { return H5T_CONV_INIT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1ERROR(JNIEnv *env, jclass cls) { return H5T_CSET_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1ASCII(JNIEnv *env, jclass cls) { return H5T_CSET_ASCII; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1UTF8(JNIEnv *env, jclass cls) { return H5T_CSET_UTF8; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_110(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_10; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_111(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_11; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_112(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_12; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_113(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_13; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_114(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_14; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_115(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_15; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_12(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_2; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_13(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_3; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_14(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_4; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_15(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_5; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_16(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_6; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_17(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_7; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_18(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_8; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1CSET_1RESERVED_19(JNIEnv *env, jclass cls) { return H5T_CSET_RESERVED_9; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1DIR_1ASCEND(JNIEnv *env, jclass cls) { return H5T_DIR_ASCEND; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1DIR_1DEFAULT(JNIEnv *env, jclass cls) { return H5T_DIR_DEFAULT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1DIR_1DESCEND(JNIEnv *env, jclass cls) { return H5T_DIR_DESCEND; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ENUM(JNIEnv *env, jclass cls) { return H5T_ENUM; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1FLOAT(JNIEnv *env, jclass cls) { return H5T_FLOAT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1FORTRAN_1S1(JNIEnv *env, jclass cls) { return H5T_FORTRAN_S1; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1IEEE_1F32BE(JNIEnv *env, jclass cls) { return H5T_IEEE_F32BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1IEEE_1F32LE(JNIEnv *env, jclass cls) { return H5T_IEEE_F32LE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1IEEE_1F64BE(JNIEnv *env, jclass cls) { return H5T_IEEE_F64BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1IEEE_1F64LE(JNIEnv *env, jclass cls) { return H5T_IEEE_F64LE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEGER(JNIEnv *env, jclass cls) { return H5T_INTEGER; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1B16(JNIEnv *env, jclass cls) { return H5T_INTEL_B16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1B32(JNIEnv *env, jclass cls) { return H5T_INTEL_B32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1B64(JNIEnv *env, jclass cls) { return H5T_INTEL_B64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1B8(JNIEnv *env, jclass cls) { return H5T_INTEL_B8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1F32(JNIEnv *env, jclass cls) { return H5T_INTEL_F32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1F64(JNIEnv *env, jclass cls) { return H5T_INTEL_F64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1I16(JNIEnv *env, jclass cls) { return H5T_INTEL_I16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1I32(JNIEnv *env, jclass cls) { return H5T_INTEL_I32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1I64(JNIEnv *env, jclass cls) { return H5T_INTEL_I64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1I8(JNIEnv *env, jclass cls) { return H5T_INTEL_I8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1U16(JNIEnv *env, jclass cls) { return H5T_INTEL_U16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1U32(JNIEnv *env, jclass cls) { return H5T_INTEL_U32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1U64(JNIEnv *env, jclass cls) { return H5T_INTEL_U64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1INTEL_1U8(JNIEnv *env, jclass cls) { return H5T_INTEL_U8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1B16(JNIEnv *env, jclass cls) { return H5T_MIPS_B16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1B32(JNIEnv *env, jclass cls) { return H5T_MIPS_B32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1B64(JNIEnv *env, jclass cls) { return H5T_MIPS_B64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1B8(JNIEnv *env, jclass cls) { return H5T_MIPS_B8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1F32(JNIEnv *env, jclass cls) { return H5T_MIPS_F32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1F64(JNIEnv *env, jclass cls) { return H5T_MIPS_F64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1I16(JNIEnv *env, jclass cls) { return H5T_MIPS_I16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1I32(JNIEnv *env, jclass cls) { return H5T_MIPS_I32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1I64(JNIEnv *env, jclass cls) { return H5T_MIPS_I64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1I8(JNIEnv *env, jclass cls) { return H5T_MIPS_I8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1U16(JNIEnv *env, jclass cls) { return H5T_MIPS_U16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1U32(JNIEnv *env, jclass cls) { return H5T_MIPS_U32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1U64(JNIEnv *env, jclass cls) { return H5T_MIPS_U64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1MIPS_1U8(JNIEnv *env, jclass cls) { return H5T_MIPS_U8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1B16(JNIEnv *env, jclass cls) { return H5T_NATIVE_B16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1B32(JNIEnv *env, jclass cls) { return H5T_NATIVE_B32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1B64(JNIEnv *env, jclass cls) { return H5T_NATIVE_B64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1B8(JNIEnv *env, jclass cls) { return H5T_NATIVE_B8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1CHAR(JNIEnv *env, jclass cls) { return H5T_NATIVE_CHAR; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1DOUBLE(JNIEnv *env, jclass cls) { return H5T_NATIVE_DOUBLE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1FLOAT(JNIEnv *env, jclass cls) { return H5T_NATIVE_FLOAT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1HADDR(JNIEnv *env, jclass cls) { return H5T_NATIVE_HADDR; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1HBOOL(JNIEnv *env, jclass cls) { return H5T_NATIVE_HBOOL; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1HERR(JNIEnv *env, jclass cls) { return H5T_NATIVE_HERR; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1HSIZE(JNIEnv *env, jclass cls) { return H5T_NATIVE_HSIZE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1HSSIZE(JNIEnv *env, jclass cls) { return H5T_NATIVE_HSSIZE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1FAST16(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_FAST16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1FAST32(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_FAST32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1FAST64(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_FAST64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1FAST8(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_FAST8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1LEAST16(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_LEAST16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1LEAST32(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_LEAST32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1LEAST64(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_LEAST64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT_1LEAST8(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT_LEAST8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT16(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT32(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT64(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1INT8(JNIEnv *env, jclass cls) { return H5T_NATIVE_INT8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1LDOUBLE(JNIEnv *env, jclass cls) { return H5T_NATIVE_LDOUBLE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1LLONG(JNIEnv *env, jclass cls) { return H5T_NATIVE_LLONG; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1LONG(JNIEnv *env, jclass cls) { return H5T_NATIVE_LONG; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1OPAQUE(JNIEnv *env, jclass cls) { return H5T_NATIVE_OPAQUE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1SCHAR(JNIEnv *env, jclass cls) { return H5T_NATIVE_SCHAR; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1SHORT(JNIEnv *env, jclass cls) { return H5T_NATIVE_SHORT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UCHAR(JNIEnv *env, jclass cls) { return H5T_NATIVE_UCHAR; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1FAST16(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_FAST16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1FAST32(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_FAST32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1FAST64(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_FAST64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1FAST8(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_FAST8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1LEAST16(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_LEAST16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1LEAST32(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_LEAST32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1LEAST64(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_LEAST64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT_1LEAST8(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT_LEAST8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT16(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT16; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT32(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT32; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT64(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT64; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1UINT8(JNIEnv *env, jclass cls) { return H5T_NATIVE_UINT8; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1ULLONG(JNIEnv *env, jclass cls) { return H5T_NATIVE_ULLONG; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1ULONG(JNIEnv *env, jclass cls) { return H5T_NATIVE_ULONG; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NATIVE_1USHORT(JNIEnv *env, jclass cls) { return H5T_NATIVE_USHORT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NCLASSES(JNIEnv *env, jclass cls) { return H5T_NCLASSES; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NO_1CLASS(JNIEnv *env, jclass cls) { return H5T_NO_CLASS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NORM_1ERROR(JNIEnv *env, jclass cls) { return H5T_NORM_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NORM_1IMPLIED(JNIEnv *env, jclass cls) { return H5T_NORM_IMPLIED; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NORM_1MSBSET(JNIEnv *env, jclass cls) { return H5T_NORM_MSBSET; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NORM_1NONE(JNIEnv *env, jclass cls) { return H5T_NORM_NONE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NPAD(JNIEnv *env, jclass cls) { return H5T_NPAD; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1NSGN(JNIEnv *env, jclass cls) { return H5T_NSGN; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1OPAQUE(JNIEnv *env, jclass cls) { return H5T_OPAQUE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1OPAQUE_1TAG_1MAX(JNIEnv *env, jclass cls) { return H5T_OPAQUE_TAG_MAX; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ORDER_1BE(JNIEnv *env, jclass cls) { return H5T_ORDER_BE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ORDER_1ERROR(JNIEnv *env, jclass cls) { return H5T_ORDER_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ORDER_1LE(JNIEnv *env, jclass cls) { return H5T_ORDER_LE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ORDER_1NONE(JNIEnv *env, jclass cls) { return H5T_ORDER_NONE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1ORDER_1VAX(JNIEnv *env, jclass cls) { return H5T_ORDER_VAX; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1PAD_1BACKGROUND(JNIEnv *env, jclass cls) { return H5T_PAD_BACKGROUND; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1PAD_1ERROR(JNIEnv *env, jclass cls) { return H5T_PAD_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1PAD_1ONE(JNIEnv *env, jclass cls) { return H5T_PAD_ONE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1PAD_1ZERO(JNIEnv *env, jclass cls) { return H5T_PAD_ZERO; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1PERS_1DONTCARE(JNIEnv *env, jclass cls) { return H5T_PERS_DONTCARE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1PERS_1HARD(JNIEnv *env, jclass cls) { return H5T_PERS_HARD; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1PERS_1SOFT(JNIEnv *env, jclass cls) { return H5T_PERS_SOFT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1REFERENCE(JNIEnv *env, jclass cls) { return H5T_REFERENCE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1SGN_12(JNIEnv *env, jclass cls) { return H5T_SGN_2; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1SGN_1ERROR(JNIEnv *env, jclass cls) { return H5T_SGN_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1SGN_1NONE(JNIEnv *env, jclass cls) { return H5T_SGN_NONE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B16BE(JNIEnv *env, jclass cls) { return H5T_STD_B16BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B16LE(JNIEnv *env, jclass cls) { return H5T_STD_B16LE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B32BE(JNIEnv *env, jclass cls) { return H5T_STD_B32BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B32LE(JNIEnv *env, jclass cls) { return H5T_STD_B32LE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B64BE(JNIEnv *env, jclass cls) { return H5T_STD_B64BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B64LE(JNIEnv *env, jclass cls) { return H5T_STD_B64LE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B8BE(JNIEnv *env, jclass cls) { return H5T_STD_B8BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1B8LE(JNIEnv *env, jclass cls) { return H5T_STD_B8LE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I16BE(JNIEnv *env, jclass cls) { return H5T_STD_I16BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I16LE(JNIEnv *env, jclass cls) { return H5T_STD_I16LE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I32BE(JNIEnv *env, jclass cls) { return H5T_STD_I32BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I32LE(JNIEnv *env, jclass cls) { return H5T_STD_I32LE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I64BE(JNIEnv *env, jclass cls) { return H5T_STD_I64BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I64LE(JNIEnv *env, jclass cls) { return H5T_STD_I64LE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I8BE(JNIEnv *env, jclass cls) { return H5T_STD_I8BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1I8LE(JNIEnv *env, jclass cls) { return H5T_STD_I8LE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1REF_1DSETREG(JNIEnv *env, jclass cls) { return H5T_STD_REF_DSETREG; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1REF_1OBJ(JNIEnv *env, jclass cls) { return H5T_STD_REF_OBJ; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U16BE(JNIEnv *env, jclass cls) { return H5T_STD_U16BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U16LE(JNIEnv *env, jclass cls) { return H5T_STD_U16LE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U32BE(JNIEnv *env, jclass cls) { return H5T_STD_U32BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U32LE(JNIEnv *env, jclass cls) { return H5T_STD_U32LE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U64BE(JNIEnv *env, jclass cls) { return H5T_STD_U64BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U64LE(JNIEnv *env, jclass cls) { return H5T_STD_U64LE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U8BE(JNIEnv *env, jclass cls) { return H5T_STD_U8BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STD_1U8LE(JNIEnv *env, jclass cls) { return H5T_STD_U8LE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1ERROR(JNIEnv *env, jclass cls) { return H5T_STR_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1NULLPAD(JNIEnv *env, jclass cls) { return H5T_STR_NULLPAD; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1NULLTERM(JNIEnv *env, jclass cls) { return H5T_STR_NULLTERM; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_110(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_10; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_111(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_11; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_112(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_12; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_113(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_13; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_114(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_14; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_115(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_15; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_13(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_3; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_14(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_4; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_15(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_5; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_16(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_6; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_17(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_7; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_18(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_8; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1RESERVED_19(JNIEnv *env, jclass cls) { return H5T_STR_RESERVED_9; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STR_1SPACEPAD(JNIEnv *env, jclass cls) { return H5T_STR_SPACEPAD; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1STRING(JNIEnv *env, jclass cls) { return H5T_STRING; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1TIME(JNIEnv *env, jclass cls) { return H5T_TIME; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1UNIX_1D32BE(JNIEnv *env, jclass cls) { return H5T_UNIX_D32BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1UNIX_1D32LE(JNIEnv *env, jclass cls) { return H5T_UNIX_D32LE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1UNIX_1D64BE(JNIEnv *env, jclass cls) { return H5T_UNIX_D64BE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1UNIX_1D64LE(JNIEnv *env, jclass cls) { return H5T_UNIX_D64LE; }
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1VARIABLE(JNIEnv *env, jclass cls) { return (int)H5T_VARIABLE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5T_1VLEN(JNIEnv *env, jclass cls) { return H5T_VLEN; }
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1CB_1CONT(JNIEnv *env, jclass cls) { return H5Z_CB_CONT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1CB_1ERROR(JNIEnv *env, jclass cls) { return H5Z_CB_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1CB_1FAIL(JNIEnv *env, jclass cls) { return H5Z_CB_FAIL; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1CB_1NO(JNIEnv *env, jclass cls) { return H5Z_CB_NO; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1DISABLE_1EDC(JNIEnv *env, jclass cls) { return H5Z_DISABLE_EDC; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1ENABLE_1EDC(JNIEnv *env, jclass cls) { return H5Z_ENABLE_EDC; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1ERROR_1EDC(JNIEnv *env, jclass cls) { return H5Z_ERROR_EDC; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1CONFIG_1DECODE_1ENABLED(JNIEnv *env, jclass cls) { return H5Z_FILTER_CONFIG_DECODE_ENABLED; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1CONFIG_1ENCODE_1ENABLED(JNIEnv *env, jclass cls) { return H5Z_FILTER_CONFIG_ENCODE_ENABLED; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1DEFLATE(JNIEnv *env, jclass cls) { return H5Z_FILTER_DEFLATE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1ERROR(JNIEnv *env, jclass cls) { return H5Z_FILTER_ERROR; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1FLETCHER32(JNIEnv *env, jclass cls) { return H5Z_FILTER_FLETCHER32; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1MAX(JNIEnv *env, jclass cls) { return H5Z_FILTER_MAX; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1NBIT(JNIEnv *env, jclass cls) {return H5Z_FILTER_NBIT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1NONE(JNIEnv *env, jclass cls) { return H5Z_FILTER_NONE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1RESERVED(JNIEnv *env, jclass cls) { return H5Z_FILTER_RESERVED; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1SCALEOFFSET(JNIEnv *env, jclass cls){ return H5Z_FILTER_SCALEOFFSET; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1SHUFFLE(JNIEnv *env, jclass cls) { return H5Z_FILTER_SHUFFLE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1SZIP(JNIEnv *env, jclass cls) { return H5Z_FILTER_SZIP; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FLAG_1DEFMASK(JNIEnv *env, jclass cls) { return H5Z_FLAG_DEFMASK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FLAG_1INVMASK(JNIEnv *env, jclass cls) { return H5Z_FLAG_INVMASK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FLAG_1MANDATORY(JNIEnv *env, jclass cls) { return H5Z_FLAG_MANDATORY; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FLAG_1OPTIONAL(JNIEnv *env, jclass cls) { return H5Z_FLAG_OPTIONAL; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FLAG_1REVERSE(JNIEnv *env, jclass cls) { return H5Z_FLAG_REVERSE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FLAG_1SKIP_1EDC(JNIEnv *env, jclass cls) { return H5Z_FLAG_SKIP_EDC; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1MAX_1NFILTERS(JNIEnv *env, jclass cls) { return H5Z_MAX_NFILTERS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1NO_1EDC(JNIEnv *env, jclass cls) { return H5Z_NO_EDC; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1SO_1INT_1MINBITS_1DEFAULT(JNIEnv *env, jclass cls) { return H5Z_SO_INT_MINBITS_DEFAULT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1SO_1FLOAT_1DSCALE(JNIEnv *env, jclass cls){return H5Z_SO_FLOAT_DSCALE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1SO_1FLOAT_1ESCALE(JNIEnv *env, jclass cls){return H5Z_SO_FLOAT_ESCALE; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1SO_1INT(JNIEnv *env, jclass cls){return H5Z_SO_INT; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1SHUFFLE_1USER_1NPARMS(JNIEnv *env, jclass cls) { return H5Z_SHUFFLE_USER_NPARMS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1SHUFFLE_1TOTAL_1NPARMS(JNIEnv *env, jclass cls) { return H5Z_SHUFFLE_TOTAL_NPARMS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1SZIP_1USER_1NPARMS(JNIEnv *env, jclass cls) { return H5Z_SZIP_USER_NPARMS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1SZIP_1TOTAL_1NPARMS(JNIEnv *env, jclass cls) { return H5Z_SZIP_TOTAL_NPARMS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1SZIP_1PARM_1MASK(JNIEnv *env, jclass cls) { return H5Z_SZIP_PARM_MASK; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1SZIP_1PARM_1PPB(JNIEnv *env, jclass cls) { return H5Z_SZIP_PARM_PPB; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1SZIP_1PARM_1BPP(JNIEnv *env, jclass cls) { return H5Z_SZIP_PARM_BPP; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1SZIP_1PARM_1PPS(JNIEnv *env, jclass cls) { return H5Z_SZIP_PARM_PPS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1NBIT_1USER_1NPARMS(JNIEnv *env, jclass cls) { return H5Z_NBIT_USER_NPARMS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1SCALEOFFSET_1USER_1NPARMS(JNIEnv *env, jclass cls) { return H5Z_SCALEOFFSET_USER_NPARMS; }
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_HDF5Constants_H5Z_1FILTER_1ALL(JNIEnv *env, jclass cls) { return H5Z_FILTER_ALL; }
+
+#pragma GCC diagnostic pop
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
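Each exported function above simply returns the value of one H5Z_* macro from hdf5.h, which is how hdf.hdf5lib.HDF5Constants obtains build-specific constant values without hard-coding them. A minimal Java sketch of how such constants are typically consumed follows; whether the class republishes them as static fields (as assumed here) is defined in HDF5Constants.java, not in this file.

    // Hedged usage sketch: assumes hdf.hdf5lib.HDF5Constants republishes the
    // values returned by the natives above as public static int fields.
    import hdf.hdf5lib.HDF5Constants;

    public class FilterConstantsDemo {
        public static void main(String[] args) {
            // Values come from the native library at class-initialization time,
            // so they always match the hdf5.h this JNI layer was compiled against.
            System.out.println("SZIP filter id:    " + HDF5Constants.H5Z_FILTER_SZIP);
            System.out.println("Shuffle filter id: " + HDF5Constants.H5Z_FILTER_SHUFFLE);
            System.out.println("Mandatory flag:    " + HDF5Constants.H5Z_FLAG_MANDATORY);
        }
    }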
diff --git a/java/src/jni/h5Imp.c b/java/src/jni/h5Imp.c
new file mode 100644
index 0000000..05d5d59
--- /dev/null
+++ b/java/src/jni/h5Imp.c
@@ -0,0 +1,181 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ * http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/*
+ * This code is the C-interface called by Java programs to access the
+ * general library functions of the HDF5 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ */
+
+#include "hdf5.h"
+#include <jni.h>
+#include "h5jni.h"
+#include "h5Imp.h"
+
+extern JavaVM *jvm;
+extern jobject visit_callback;
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5open
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5open(JNIEnv *env, jclass clss)
+{
+ herr_t retVal = H5open();
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5open */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5close
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5close(JNIEnv *env, jclass clss)
+{
+ herr_t retVal = H5close();
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5close */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5dont_atexit
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5dont_1atexit(JNIEnv *env, jclass clss)
+{
+ herr_t retVal = H5dont_atexit();
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5dont_1atexit */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5get_libversion
+ * Signature: ([I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5get_1libversion(JNIEnv *env, jclass clss, jintArray libversion)
+{
+ unsigned *theArray = NULL;
+ herr_t status = -1;
+ jboolean isCopy;
+
+ if (libversion == NULL) {
+ h5nullArgument(env, "H5get_libversion: libversion is NULL");
+ } /* end if */
+ else {
+ theArray = (unsigned*)ENVPTR->GetIntArrayElements(ENVPAR libversion, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5get_libversion: input not pinned");
+ } /* end if */
+ else {
+ status = H5get_libversion(&(theArray[0]), &(theArray[1]), &(theArray[2]));
+
+ if (status < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR libversion, (jint*)theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ ENVPTR->ReleaseIntArrayElements(ENVPAR libversion, (jint*)theArray,0);
+ } /* end else */
+ } /* end else */
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5get_1libversion */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5check_version
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5check_1version(JNIEnv *env, jclass clss, jint majnum, jint minnum, jint relnum)
+{
+ return (jint)H5check_version((unsigned)majnum, (unsigned)minnum, (unsigned)relnum);
+} /* end Java_hdf_hdf5lib_H5_H5check_1version */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5garbage_collect
+ * Signature: ()I
+ *
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5garbage_1collect(JNIEnv *env, jclass clss)
+{
+ herr_t retVal = H5garbage_collect();
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5garbage_1collect */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5set_free_list_limits
+ * Signature: (IIIIII)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5set_1free_1list_1limits(JNIEnv *env, jclass clss, jint reg_global_lim, jint reg_list_lim,
+ jint arr_global_lim, jint arr_list_lim, jint blk_global_lim, jint blk_list_lim )
+{
+ herr_t retVal = H5set_free_list_limits((int)reg_global_lim, (int)reg_list_lim,
+ (int)arr_global_lim, (int)arr_list_lim, (int)blk_global_lim, (int)blk_list_lim);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5set_1free_1list_1limits */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5is_library_threadsafe
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5is_1library_1threadsafe(JNIEnv *env, jclass clss)
+{
+ hbool_t is_ts = false;
+ H5is_library_threadsafe(&is_ts);
+ return (jboolean)is_ts;
+} /* end Java_hdf_hdf5lib_H5_H5is_1library_1threadsafe */
+
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
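The exported symbols above correspond to native methods of hdf.hdf5lib.H5 (JNI mangles '_' in Java names as '_1'), so the Java side of this file looks roughly as follows. This is a sketch derived only from the Signature comments above; errors surface as the HDF5 library exception raised by h5libraryError() on negative return values, hence the broad throws clause.

    // Sketch of Java-side calls into h5Imp.c; H5open/H5close/H5get_libversion/
    // H5check_version are the native methods implied by the exported names above.
    import hdf.hdf5lib.H5;

    public class LibraryVersionDemo {
        public static void main(String[] args) throws Exception {
            H5.H5open();                         // ()I

            int[] vers = new int[3];             // filled as {major, minor, release}
            H5.H5get_libversion(vers);           // ([I)I - pins and fills the int array
            System.out.println("HDF5 " + vers[0] + "." + vers[1] + "." + vers[2]);

            H5.H5check_version(vers[0], vers[1], vers[2]);   // (III)I
            H5.H5close();                        // ()I
        }
    }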
diff --git a/java/src/jni/h5Imp.h b/java/src/jni/h5Imp.h
new file mode 100644
index 0000000..6600378
--- /dev/null
+++ b/java/src/jni/h5Imp.h
@@ -0,0 +1,95 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5 */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5
+#define _Included_hdf_hdf5lib_H5_H5
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5open
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5open
+ (JNIEnv *, jclass);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5close
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5close
+ (JNIEnv *, jclass);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5dont_atexit
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5dont_1atexit
+ (JNIEnv *, jclass);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5get_libversion
+ * Signature: ([I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5get_1libversion
+ (JNIEnv *, jclass, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5check_version
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5check_1version
+ (JNIEnv *, jclass, jint, jint, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5garbage_collect
+ * Signature: ()I
+ *
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5garbage_1collect
+ (JNIEnv *, jclass);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5set_free_list_limits
+ * Signature: (IIIIII)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5set_1free_1list_1limits
+ (JNIEnv *, jclass, jint, jint, jint, jint, jint, jint );
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5is_library_threadsafe
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5is_1library_1threadsafe
+ (JNIEnv *, jclass);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5 */
diff --git a/java/src/jni/h5aImp.c b/java/src/jni/h5aImp.c
new file mode 100644
index 0000000..a081271
--- /dev/null
+++ b/java/src/jni/h5aImp.c
@@ -0,0 +1,879 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ * http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include "hdf5.h"
+#include "h5util.h"
+#include <jni.h>
+#include <stdlib.h>
+#include <string.h>
+#include "h5aImp.h"
+
+extern JavaVM *jvm;
+extern jobject visit_callback;
+
+#ifdef __cplusplus
+#define CBENVPTR (cbenv)
+#define CBENVPAR
+#define JVMPTR (jvm)
+#define JVMPAR
+#define JVMPAR2
+#else
+#define CBENVPTR (*cbenv)
+#define CBENVPAR cbenv,
+#define JVMPTR (*jvm)
+#define JVMPAR jvm
+#define JVMPAR2 jvm,
+#endif
+
+/********************/
+/* Local Prototypes */
+/********************/
+
+static herr_t H5A_iterate_cb(hid_t g_id, const char *name, const H5A_info_t *info, void *op_data);
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Acreate
+ * Signature: (JLjava/lang/String;JJJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Acreate(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong type_id,
+ jlong space_id, jlong create_plist)
+{
+ hid_t attr_id = -1;
+ const char *aName;
+
+ PIN_JAVA_STRING(name, aName, -1);
+
+ attr_id = H5Acreate2((hid_t)loc_id, aName, (hid_t)type_id, (hid_t)space_id, (hid_t)create_plist, (hid_t)H5P_DEFAULT);
+
+ UNPIN_JAVA_STRING(name, aName);
+
+ if (attr_id < 0)
+ h5libraryError(env);
+
+ return (jlong)attr_id;
+} /* end Java_hdf_hdf5lib_H5__1H5Acreate */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aopen_name
+ * Signature: (JLjava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Aopen_1name(JNIEnv *env, jclass clss, jlong loc_id, jstring name)
+{
+ hid_t attr_id = -1;
+ const char *aName;
+
+ PIN_JAVA_STRING(name, aName, -1);
+
+ attr_id = H5Aopen_name((hid_t)loc_id, aName);
+
+ UNPIN_JAVA_STRING(name,aName);
+
+ if (attr_id < 0)
+ h5libraryError(env);
+
+ return (jlong)attr_id;
+} /* end Java_hdf_hdf5lib_H5__1H5Aopen_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aopen_idx
+ * Signature: (JI)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Aopen_1idx(JNIEnv *env, jclass clss, jlong loc_id, jint idx)
+{
+ hid_t attr_id = H5Aopen_idx((hid_t)loc_id, (unsigned int) idx);
+
+ if (attr_id < 0)
+ h5libraryError(env);
+
+ return (jlong)attr_id;
+} /* end Java_hdf_hdf5lib_H5__1H5Aopen_1idx */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Awrite
+ * Signature: (JJ[B)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Awrite(JNIEnv *env, jclass clss, jlong attr_id, jlong mem_type_id, jbyteArray buf)
+{
+ herr_t status = -1;
+ jbyte *byteP;
+ jboolean isCopy;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Awrite: buf is NULL");
+ } /* end if */
+ else {
+ byteP = ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy);
+ if (byteP == NULL) {
+ h5JNIFatalError(env, "H5Awrite: buf is not pinned");
+ } /* end if */
+ else {
+ status = H5Awrite((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+
+ /* free the buffer without copying back */
+ ENVPTR->ReleaseByteArrayElements(ENVPAR buf, byteP, JNI_ABORT);
+
+ if (status < 0)
+ h5libraryError(env);
+ }
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Awrite */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aread
+ * Signature: (JJ[B)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Aread(JNIEnv *env, jclass clss, jlong attr_id, jlong mem_type_id, jbyteArray buf)
+{
+ herr_t status = -1;
+ jbyte *byteP;
+ jboolean isCopy;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Aread: buf is NULL");
+ } /* end if */
+ else {
+ byteP = ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy);
+ if (byteP == NULL) {
+ h5JNIFatalError(env, "H5Aread: buf is not pinned");
+ } /* end if */
+ else {
+ status = H5Aread((hid_t)attr_id, (hid_t)mem_type_id, byteP);
+
+ if (status < 0) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR buf, byteP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR buf, byteP, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Aread */
+
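H5Awrite and H5Aread above move raw bytes between a pinned byte[] and the attribute; note the release modes: the write path always releases with JNI_ABORT (nothing to copy back), while the read path releases with 0 on success so the native data is copied into the Java array. A hedged Java-side sketch, assuming attr_id and mem_type_id are valid identifiers opened elsewhere:

    // Usage sketch for the (JJ[B)I entry points above; attr_id and mem_type_id
    // are assumed to have been created/opened by other H5 calls.
    byte[] out = new byte[] {1, 2, 3, 4};
    H5.H5Awrite(attr_id, mem_type_id, out);   // buffer released with JNI_ABORT

    byte[] in = new byte[4];
    H5.H5Aread(attr_id, mem_type_id, in);     // buffer released with mode 0 (copy back)

    String name = H5.H5Aget_name(attr_id);    // (J)Ljava/lang/String; wrapper below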
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_space
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Aget_1space(JNIEnv *env, jclass clss, jlong attr_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Aget_space((hid_t)attr_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Aget_1space */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_type
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Aget_1type(JNIEnv *env, jclass clss, jlong attr_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Aget_type((hid_t)attr_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Aget_1type */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_name
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_hdf_hdf5lib_H5_H5Aget_1name(JNIEnv *env, jclass clss, jlong attr_id)
+{
+ char *aName;
+ jstring str = NULL;
+ ssize_t buf_size;
+
+ /* get the length of the name */
+ buf_size = H5Aget_name((hid_t)attr_id, 0, NULL);
+
+ if (buf_size <= 0) {
+ h5badArgument(env, "H5Aget_name: buf_size <= 0");
+ } /* end if */
+ else {
+ buf_size++; /* add extra space for the null terminator */
+ aName = (char*)HDmalloc(sizeof(char) * (size_t)buf_size);
+ if (aName == NULL) {
+ h5outOfMemory(env, "H5Aget_name: malloc failed");
+ } /* end if */
+ else {
+ buf_size = H5Aget_name((hid_t)attr_id, (size_t)buf_size, aName);
+ if (buf_size < 0) {
+ HDfree(aName);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ /* save the string; */
+ str = ENVPTR->NewStringUTF(ENVPAR aName);
+ HDfree(aName);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ return str;
+} /* end Java_hdf_hdf5lib_H5_H5Aget_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_num_attrs
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Aget_1num_1attrs(JNIEnv *env, jclass clss, jlong loc_id)
+{
+ int retVal = -1;
+
+ retVal = H5Aget_num_attrs((hid_t)loc_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Aget_1num_1attrs */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Adelete
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Adelete(JNIEnv *env, jclass clss, jlong loc_id, jstring name)
+{
+ herr_t status = -1;
+ const char *aName;
+
+ PIN_JAVA_STRING(name, aName, -1);
+
+ status = H5Adelete((hid_t)loc_id, aName);
+
+ UNPIN_JAVA_STRING(name, aName);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Adelete */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5__1H5Aclose(JNIEnv *env, jclass clss, jlong attr_id)
+{
+ herr_t retVal = -1;
+
+ if (attr_id > 0)
+ retVal = H5Aclose((hid_t)attr_id);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Aclose */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Acreate2
+ * Signature: (JLjava/lang/String;JJJJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Acreate2(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong type_id,
+ jlong space_id, jlong create_plist, jlong access_plist)
+{
+ hid_t status = -1;
+ const char *aName;
+
+ PIN_JAVA_STRING(name, aName, -1);
+
+ status = H5Acreate2((hid_t)loc_id, aName, (hid_t)type_id,
+ (hid_t)space_id, (hid_t)create_plist, (hid_t)access_plist );
+
+ UNPIN_JAVA_STRING(name, aName);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jlong)status;
+} /* end Java_hdf_hdf5lib_H5__1H5Acreate2 */
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Aopen
+ * Signature: (JLjava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Aopen(JNIEnv *env, jclass clss, jlong obj_id, jstring name, jlong access_plist)
+
+{
+ hid_t retVal = -1;
+ const char *aName;
+
+ PIN_JAVA_STRING(name, aName, -1);
+
+ retVal = H5Aopen((hid_t)obj_id, aName, (hid_t)access_plist);
+
+ UNPIN_JAVA_STRING(name, aName);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Aopen */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Aopen_by_idx
+ * Signature: (JLjava/lang/String;IIJJJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Aopen_1by_1idx(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jint idx_type, jint order, jlong n, jlong aapl_id, jlong lapl_id)
+{
+ hid_t retVal = -1;
+ const char *aName;
+
+ PIN_JAVA_STRING(name, aName, -1);
+
+ retVal = H5Aopen_by_idx((hid_t)loc_id, aName, (H5_index_t)idx_type,
+ (H5_iter_order_t)order, (hsize_t)n, (hid_t)aapl_id, (hid_t)lapl_id);
+
+ UNPIN_JAVA_STRING(name, aName);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Aopen_1by_1idx */
+
+/*
+* Class: hdf_hdf5lib_H5
+* Method: _H5Acreate_by_name
+* Signature: (JLjava/lang/String;Ljava/lang/String;JJJJJ)J
+*/
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Acreate_1by_1name(JNIEnv *env, jclass clss, jlong loc_id, jstring obj_name, jstring attr_name, jlong type_id, jlong space_id, jlong acpl_id, jlong aapl_id, jlong lapl_id)
+{
+ hid_t retVal = -1;
+ const char *aName;
+ const char *attrName;
+
+ PIN_JAVA_STRING_TWO(obj_name, aName, attr_name, attrName, -1);
+
+ retVal = H5Acreate_by_name((hid_t)loc_id, aName, attrName, (hid_t)type_id,
+ (hid_t)space_id, (hid_t)acpl_id, (hid_t)aapl_id, (hid_t)lapl_id);
+
+ UNPIN_JAVA_STRING_TWO(obj_name, aName, attr_name, attrName);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Acreate_1by_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aexists_by_name
+ * Signature: (JLjava/lang/String;Ljava/lang/String;J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Aexists_1by_1name(JNIEnv *env, jclass clss, jlong loc_id, jstring obj_name, jstring attr_name, jlong lapl_id)
+{
+ htri_t retVal = -1;
+ const char *aName;
+ const char *attrName;
+
+ PIN_JAVA_STRING_TWO(obj_name, aName, attr_name, attrName, JNI_FALSE);
+
+ retVal = H5Aexists_by_name((hid_t)loc_id, aName, attrName, (hid_t)lapl_id);
+
+ UNPIN_JAVA_STRING_TWO(obj_name, aName, attr_name, attrName);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jboolean)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Aexists_1by_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Arename
+ * Signature: (JLjava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Arename(JNIEnv *env, jclass clss, jlong loc_id, jstring old_attr_name, jstring new_attr_name)
+{
+ herr_t retVal = -1;
+ const char *oName;
+ const char *nName;
+
+ PIN_JAVA_STRING_TWO(old_attr_name, oName, new_attr_name, nName, -1);
+
+ retVal = H5Arename((hid_t)loc_id, oName, nName);
+
+ UNPIN_JAVA_STRING_TWO(old_attr_name, oName, new_attr_name, nName);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Arename */
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Arename_by_name
+ * Signature: (JLjava/lang/String;Ljava/lang/String;Ljava/lang/String;J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Arename_1by_1name(JNIEnv *env, jclass clss, jlong loc_id, jstring obj_name, jstring old_attr_name, jstring new_attr_name, jlong lapl_id)
+{
+ herr_t retVal = -1;
+ const char *aName;
+ const char *oName;
+ const char *nName;
+
+ PIN_JAVA_STRING_THREE(obj_name, aName, old_attr_name, oName, new_attr_name, nName, -1);
+
+ retVal = H5Arename_by_name((hid_t)loc_id, aName, oName, nName, (hid_t)lapl_id);
+
+ UNPIN_JAVA_STRING_THREE(obj_name, aName, old_attr_name, oName, new_attr_name, nName);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Arename_1by_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_name_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_hdf_hdf5lib_H5_H5Aget_1name_1by_1idx(JNIEnv *env, jclass clss, jlong loc_id, jstring obj_name, jint idx_type, jint order, jlong n, jlong lapl_id)
+{
+ size_t buf_size;
+ char *aValue;
+ jlong status_size;
+ jstring str = NULL;
+ const char *aName;
+
+ PIN_JAVA_STRING(obj_name, aName, NULL);
+
+ /* get the length of the attribute name */
+ status_size = H5Aget_name_by_idx((hid_t)loc_id, aName, (H5_index_t)idx_type,
+ (H5_iter_order_t) order, (hsize_t) n, (char*)NULL, (size_t)0, (hid_t)lapl_id);
+
+ if(status_size < 0) {
+ UNPIN_JAVA_STRING(obj_name, aName);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ buf_size = (size_t)status_size + 1;/* add extra space for the null terminator */
+
+ aValue = (char*)HDmalloc(sizeof(char) * buf_size);
+ if (aValue == NULL) {
+ UNPIN_JAVA_STRING(obj_name, aName);
+ h5outOfMemory(env, "H5Aget_name_by_idx: malloc failed ");
+ } /* end if */
+ else {
+ status_size = H5Aget_name_by_idx((hid_t)loc_id, aName, (H5_index_t)idx_type,
+ (H5_iter_order_t) order, (hsize_t) n, (char*)aValue, (size_t)buf_size, (hid_t)lapl_id);
+
+ UNPIN_JAVA_STRING(obj_name, aName);
+
+ if (status_size < 0) {
+ HDfree(aValue);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR aValue);
+ HDfree(aValue);
+ if (str == NULL) {
+ /* exception -- fatal JNI error */
+ h5JNIFatalError(env, "H5Aget_name_by_idx: return string not created");
+ } /* end if */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ return str;
+} /* end Java_hdf_hdf5lib_H5_H5Aget_1name_1by_1idx */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_storage_size
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Aget_1storage_1size(JNIEnv *env, jclass clss, jlong attr_id)
+{
+ hsize_t retVal = (hsize_t)-1;
+
+ retVal = H5Aget_storage_size((hid_t)attr_id);
+/* probably returns 0 if it fails; do not raise an exception */
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Aget_1storage_1size */
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_info
+ * Signature: (J)Lhdf/hdf5lib/structs/H5A_info_t;
+ */
+JNIEXPORT jobject JNICALL
+Java_hdf_hdf5lib_H5_H5Aget_1info(JNIEnv *env, jclass clss, jlong attr_id)
+{
+ herr_t status = -1;
+ H5A_info_t ainfo;
+ jvalue args[4];
+ jobject ret_obj = NULL;
+
+ status = H5Aget_info((hid_t)attr_id, &ainfo);
+
+ if (status < 0) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+ args[0].z = ainfo.corder_valid;
+ args[1].j = ainfo.corder;
+ args[2].i = ainfo.cset;
+ args[3].j = (jlong)ainfo.data_size;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5A_info_t", "(ZJIJ)V", args);
+ } /* end else */
+ return ret_obj;
+} /* end Java_hdf_hdf5lib_H5_H5Aget_1info */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_info_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)Lhdf/hdf5lib/structs/H5A_info_t;
+ */
+JNIEXPORT jobject JNICALL
+Java_hdf_hdf5lib_H5_H5Aget_1info_1by_1idx(JNIEnv *env, jclass clss, jlong loc_id, jstring obj_name, jint idx_type, jint order, jlong n, jlong lapl_id)
+{
+ herr_t status;
+ H5A_info_t ainfo;
+ jvalue args[4];
+ jobject ret_obj = NULL;
+ const char *aName;
+
+ PIN_JAVA_STRING(obj_name, aName, NULL);
+
+ status = H5Aget_info_by_idx((hid_t)loc_id, aName, (H5_index_t)idx_type,
+ (H5_iter_order_t)order, (hsize_t)n, &ainfo, (hid_t)lapl_id);
+
+ UNPIN_JAVA_STRING(obj_name, aName);
+
+ if (status < 0) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+ args[0].z = ainfo.corder_valid;
+ args[1].j = ainfo.corder;
+ args[2].i = ainfo.cset;
+ args[3].j = (jlong)ainfo.data_size;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5A_info_t", "(ZJIJ)V", args);
+ } /* end else */
+ return ret_obj;
+} /* end Java_hdf_hdf5lib_H5_H5Aget_1info_1by_1idx */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_info_by_name
+ * Signature: (JLjava/lang/String;Ljava/lang/String;J)Lhdf/hdf5lib/structs/H5A_info_t;
+ */
+JNIEXPORT jobject JNICALL
+Java_hdf_hdf5lib_H5_H5Aget_1info_1by_1name(JNIEnv *env, jclass clss, jlong loc_id, jstring obj_name, jstring attr_name, jlong lapl_id)
+{
+ const char *aName;
+ const char *attrName;
+ herr_t status;
+ H5A_info_t ainfo;
+ jvalue args[4];
+ jobject ret_obj = NULL;
+
+ PIN_JAVA_STRING_TWO(obj_name, aName, attr_name, attrName, NULL);
+
+ status = H5Aget_info_by_name((hid_t)loc_id, aName, attrName, &ainfo, (hid_t)lapl_id);
+
+ UNPIN_JAVA_STRING_TWO(obj_name, aName, attr_name, attrName);
+
+ if (status < 0) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+ args[0].z = ainfo.corder_valid;
+ args[1].j = ainfo.corder;
+ args[2].i = ainfo.cset;
+ args[3].j = (jlong)ainfo.data_size;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5A_info_t", "(ZJIJ)V", args);
+ } /* end else */
+ return ret_obj;
+} /* end Java_hdf_hdf5lib_H5_H5Aget_1info_1by_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Adelete_by_name
+ * Signature: (JLjava/lang/String;Ljava/lang/String;J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Adelete_1by_1name(JNIEnv *env, jclass clss, jlong loc_id, jstring obj_name, jstring attr_name, jlong lapl_id)
+{
+ herr_t retVal = -1;
+ const char *aName;
+ const char *attrName;
+
+ PIN_JAVA_STRING_TWO(obj_name, aName, attr_name, attrName, -1);
+
+ retVal = H5Adelete_by_name((hid_t)loc_id, aName, attrName, (hid_t)lapl_id);
+
+ UNPIN_JAVA_STRING_TWO(obj_name, aName, attr_name, attrName);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Adelete_1by_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aexists
+ * Signature: (JLjava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Aexists(JNIEnv *env, jclass clss, jlong obj_id, jstring attr_name)
+{
+ htri_t bval = JNI_FALSE;
+ const char *aName;
+
+ PIN_JAVA_STRING(attr_name, aName, JNI_FALSE);
+
+ bval = H5Aexists((hid_t)obj_id, aName);
+
+ UNPIN_JAVA_STRING(attr_name, aName);
+
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Aexists */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Adelete_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Adelete_1by_1idx(JNIEnv *env, jclass clss, jlong loc_id, jstring obj_name, jint idx_type, jint order, jlong n, jlong lapl_id)
+{
+ herr_t status = -1;
+ const char *aName;
+
+ PIN_JAVA_STRING0(obj_name, aName);
+
+ status = H5Adelete_by_idx((hid_t)loc_id, aName, (H5_index_t)idx_type, (H5_iter_order_t)order, (hsize_t)n, (hid_t)lapl_id);
+
+ UNPIN_JAVA_STRING(obj_name, aName);
+
+ if (status < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Adelete_1by_1idx */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Aopen_by_name
+ * Signature: (JLjava/lang/String;Ljava/lang/String;JJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Aopen_1by_1name(JNIEnv *env, jclass clss, jlong loc_id, jstring obj_name, jstring attr_name, jlong aapl_id, jlong lapl_id)
+
+{
+ hid_t status = -1;
+ const char *aName;
+ const char *oName;
+
+ PIN_JAVA_STRING_TWO(obj_name, oName, attr_name, aName, -1);
+
+ status = H5Aopen_by_name((hid_t)loc_id, oName, aName, (hid_t)aapl_id, (hid_t)lapl_id);
+
+ UNPIN_JAVA_STRING_TWO(obj_name, oName, attr_name, aName);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jlong)status;
+} /* end Java_hdf_hdf5lib_H5__1H5Aopen_1by_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_create_plist
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Aget_1create_1plist(JNIEnv *env, jclass clss, jlong attr_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Aget_create_plist((hid_t)attr_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Aget_1create_1plist */
+
+static herr_t
+H5A_iterate_cb(hid_t g_id, const char *name, const H5A_info_t *info, void *op_data) {
+ JNIEnv *cbenv;
+ jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jstring str;
+ jmethodID constructor;
+ jvalue args[4];
+ jobject cb_info_t = NULL;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) == 0) {
+ cls = CBENVPTR->GetObjectClass(CBENVPAR visit_callback);
+ if (cls != 0) {
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(JLjava/lang/String;Lhdf/hdf5lib/structs/H5A_info_t;Lhdf/hdf5lib/callbacks/H5A_iterate_t;)I");
+ if (mid != 0) {
+ str = CBENVPTR->NewStringUTF(CBENVPAR name);
+
+ args[0].z = info->corder_valid;
+ args[1].j = info->corder;
+ args[2].i = info->cset;
+ args[3].j = (jlong)info->data_size;
+ /* get a reference to your class if you don't have it already */
+ cls = CBENVPTR->FindClass(CBENVPAR "hdf/hdf5lib/structs/H5A_info_t");
+ if (cls != 0) {
+ /* get a reference to the constructor; the name is <init> */
+ constructor = CBENVPTR->GetMethodID(CBENVPAR cls, "<init>", "(ZJIJ)V");
+ if (constructor != 0) {
+ cb_info_t = CBENVPTR->NewObjectA(CBENVPAR cls, constructor, args);
+
+ status = CBENVPTR->CallIntMethod(CBENVPAR visit_callback, mid, g_id, str, cb_info_t, op_data);
+ } /* end if (constructor != 0) */
+ } /* end if (cls != 0) */
+ } /* end if (mid != 0) */
+ } /* end if (cls != 0) */
+ } /* end if */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+
+ return (herr_t)status;
+} /* end H5A_iterate_cb */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aiterate
+ * Signature: (JIIJLjava/lang/Object;Ljava/lang/Object;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Aiterate(JNIEnv *env, jclass clss, jlong grp_id, jint idx_type, jint order,
+ jlong idx, jobject callback_op, jobject op_data)
+{
+ hsize_t start_idx = (hsize_t)idx;
+ herr_t status = -1;
+
+ ENVPTR->GetJavaVM(ENVPAR &jvm);
+ visit_callback = callback_op;
+
+ if ((op_data == NULL) || (callback_op == NULL)) {
+ h5nullArgument(env, "H5Aiterate: op_data or callback_op is NULL");
+ } /* end if */
+ else {
+ status = H5Aiterate2((hid_t)grp_id, (H5_index_t)idx_type, (H5_iter_order_t)order, (hsize_t*)&start_idx, (H5A_operator2_t)H5A_iterate_cb, (void*)op_data);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Aiterate */
+
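H5A_iterate_cb above attaches the callback thread to the JVM, looks up a method named callback with descriptor (JLjava/lang/String;Lhdf/hdf5lib/structs/H5A_info_t;Lhdf/hdf5lib/callbacks/H5A_iterate_t;)I on the object stored in visit_callback, and hands it a freshly constructed H5A_info_t for every attribute. A rough Java sketch of the contract this implies is below; apart from the class names embedded in the JNI strings (H5A_info_t, H5A_iterate_t) and the H5.H5Aiterate signature above, the names used here (PrintAttrNames, the index/order constants) are assumptions.

    // Hypothetical callback object whose callback() method matches the
    // descriptor looked up by GetMethodID in H5A_iterate_cb above.
    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.hdf5lib.callbacks.H5A_iterate_t;
    import hdf.hdf5lib.structs.H5A_info_t;

    class PrintAttrNames {
        public int callback(long loc_id, String name, H5A_info_t info, H5A_iterate_t op_data) {
            System.out.println("attribute: " + name);
            return 0;   // a zero return tells H5Aiterate2 to continue the iteration
        }
    }

    // Elsewhere, with obj_id already open; op_data must be a non-null
    // H5A_iterate_t, since that is the type named in the callback descriptor:
    //   H5.H5Aiterate(obj_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC,
    //                 0L, new PrintAttrNames(), opData);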
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aiterate_by_name
+ * Signature: (JLjava/lang/String;IIJLjava/lang/Object;Ljava/lang/Object;J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Aiterate_1by_1name(JNIEnv *env, jclass clss, jlong grp_id, jstring name, jint idx_type, jint order,
+ jlong idx, jobject callback_op, jobject op_data, jlong access_id)
+{
+ const char *lName;
+ hsize_t start_idx = (hsize_t)idx;
+ herr_t status = -1;
+
+ ENVPTR->GetJavaVM(ENVPAR &jvm);
+ visit_callback = callback_op;
+
+ if ((op_data == NULL) || (callback_op == NULL)) {
+ h5nullArgument(env, "H5Aiterate_by_name: op_data or callback_op is NULL");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(name, lName, -1);
+
+ status = H5Aiterate_by_name((hid_t)grp_id, lName, (H5_index_t)idx_type, (H5_iter_order_t)order, (hsize_t*)&start_idx, (H5A_operator2_t)H5A_iterate_cb, (void*)op_data, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, lName);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Aiterate_1by_1name */
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
diff --git a/java/src/jni/h5aImp.h b/java/src/jni/h5aImp.h
new file mode 100644
index 0000000..bbc61f5
--- /dev/null
+++ b/java/src/jni/h5aImp.h
@@ -0,0 +1,273 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5A */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5A
+#define _Included_hdf_hdf5lib_H5_H5A
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Acreate
+ * Signature: (JLjava/lang/String;JJJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Acreate
+ (JNIEnv *, jclass, jlong, jstring, jlong, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aopen_name
+ * Signature: (JLjava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Aopen_1name
+ (JNIEnv *, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aopen_idx
+ * Signature: (JI)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Aopen_1idx
+ (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Awrite
+ * Signature: (JJ[B)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Awrite
+ (JNIEnv *, jclass, jlong, jlong, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aread
+ * Signature: (JJ[B)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Aread
+ (JNIEnv *, jclass, jlong, jlong, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_space
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Aget_1space
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_type
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Aget_1type
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_name
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_hdf_hdf5lib_H5_H5Aget_1name
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_num_attrs
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Aget_1num_1attrs
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Adelete
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Adelete
+ (JNIEnv *, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5__1H5Aclose
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Acreate2
+ * Signature: (JLjava/lang/String;JJJJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Acreate2
+(JNIEnv *, jclass, jlong, jstring, jlong, jlong, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Aopen
+ * Signature: (JLjava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Aopen
+ (JNIEnv *, jclass, jlong, jstring, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Aopen_by_idx
+ * Signature: (JLjava/lang/String;IIJJJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Aopen_1by_1idx
+ (JNIEnv *, jclass, jlong, jstring, jint, jint, jlong, jlong, jlong);
+
+/*
+* Class: hdf_hdf5lib_H5
+* Method: _H5Acreate_by_name
+* Signature: (JLjava/lang/String;Ljava/lang/String;JJJJJ)J
+*/
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Acreate_1by_1name
+(JNIEnv *, jclass, jlong, jstring, jstring, jlong, jlong, jlong, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aexists_by_name
+ * Signature: (JLjava/lang/String;Ljava/lang/String;J)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Aexists_1by_1name
+ (JNIEnv *, jclass, jlong, jstring, jstring, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Arename
+ * Signature: (JLjava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Arename
+ (JNIEnv *, jclass, jlong, jstring, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Arename_by_name
+ * Signature: (JLjava/lang/String;Ljava/lang/String;Ljava/lang/String;J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Arename_1by_1name
+ (JNIEnv *, jclass, jlong, jstring, jstring, jstring, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_name_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_hdf_hdf5lib_H5_H5Aget_1name_1by_1idx
+ (JNIEnv *, jclass, jlong, jstring, jint, jint, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_storage_size
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Aget_1storage_1size
+ (JNIEnv *, jclass, jlong);
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_info
+ * Signature: (J)Lhdf/hdf5lib/structs/H5A_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Aget_1info
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_info_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)Lhdf/hdf5lib/structs/H5A_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Aget_1info_1by_1idx
+ (JNIEnv *, jclass, jlong, jstring, jint, jint, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_info_by_name
+ * Signature: (JLjava/lang/String;Ljava/lang/String;J)Lhdf/hdf5lib/structs/H5A_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Aget_1info_1by_1name
+ (JNIEnv *, jclass, jlong, jstring, jstring, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Adelete_by_name
+ * Signature: (JLjava/lang/String;Ljava/lang/String;J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Adelete_1by_1name
+ (JNIEnv *, jclass, jlong, jstring, jstring, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aexists
+ * Signature: (JLjava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Aexists
+ (JNIEnv *, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Adelete_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Adelete_1by_1idx
+ (JNIEnv *, jclass, jlong, jstring, jint, jint, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Aopen_by_name
+ * Signature: (JLjava/lang/String;Ljava/lang/String;JJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Aopen_1by_1name
+ (JNIEnv *, jclass, jlong, jstring, jstring, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aget_create_plist
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Aget_1create_1plist
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aiterate
+ * Signature: (JIIJLjava/lang/Object;Ljava/lang/Object;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Aiterate
+ (JNIEnv*, jclass, jlong, jint, jint, jlong, jobject, jobject);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Aiterate_by_name
+ * Signature: (JLjava/lang/String;IIJLjava/lang/Object;Ljava/lang/Object;J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Aiterate_1by_1name
+ (JNIEnv*, jclass, jlong, jstring, jint, jint, jlong, jobject, jobject, jlong);
+
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5A */
diff --git a/java/src/jni/h5dImp.c b/java/src/jni/h5dImp.c
new file mode 100644
index 0000000..f70cb5e
--- /dev/null
+++ b/java/src/jni/h5dImp.c
@@ -0,0 +1,1758 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ * http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include <jni.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "hdf5.h"
+#include "h5util.h"
+#include "h5dImp.h"
+
+extern JavaVM *jvm;
+extern jobject visit_callback;
+
+#ifdef __cplusplus
+ #ifdef _WINDOWS
+ #include <direct.h>
+ #endif
+ #define CBENVPTR (cbenv)
+ #define CBENVPAR
+ #define JVMPTR (jvm)
+ #define JVMPAR
+ #define JVMPAR2
+#else
+ #define CBENVPTR (*cbenv)
+ #define CBENVPAR cbenv,
+ #define JVMPTR (*jvm)
+ #define JVMPAR jvm
+ #define JVMPAR2 jvm,
+#endif
+
+/********************/
+/* Local Prototypes */
+/********************/
+
+static herr_t H5DreadVL_str (JNIEnv *env, hid_t did, hid_t tid, hid_t mem_sid, hid_t file_sid, hid_t xfer_plist_id, jobjectArray buf);
+static herr_t H5DreadVL_array (JNIEnv *env, hid_t did, hid_t tid, hid_t mem_sid, hid_t file_sid, hid_t xfer_plist_id, jobjectArray buf);
+static herr_t H5DwriteVL_str (JNIEnv *env, hid_t did, hid_t tid, hid_t mem_sid, hid_t file_sid, hid_t xfer_plist_id, jobjectArray buf);
+static herr_t H5DwriteVL_array (JNIEnv *env, hid_t did, hid_t tid, hid_t mem_sid, hid_t file_sid, hid_t xfer_plist_id, jobjectArray buf);
+
+/********************/
+/* Local Macros */
+/********************/
+
+#define PIN_BYTE_ARRAY() { \
+ if (isCriticalPinning) \
+ buffP = (jbyte*)ENVPTR->GetPrimitiveArrayCritical(ENVPAR buf, &isCopy); \
+ else \
+ buffP = ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy); \
+}
+
+#define UNPIN_BYTE_ARRAY(mode) { \
+ if (isCriticalPinning) \
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR buf, buffP, mode); \
+ else \
+ ENVPTR->ReleaseByteArrayElements(ENVPAR buf, buffP, mode); \
+}
+
+#define PIN_SHORT_ARRAY() { \
+ if (isCriticalPinning) \
+ buffP = (jshort*)ENVPTR->GetPrimitiveArrayCritical(ENVPAR buf, &isCopy); \
+ else \
+ buffP = ENVPTR->GetShortArrayElements(ENVPAR buf, &isCopy); \
+}
+
+#define UNPIN_SHORT_ARRAY(mode) { \
+ if (isCriticalPinning) \
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR buf, buffP, mode); \
+ else \
+ ENVPTR->ReleaseShortArrayElements(ENVPAR buf, buffP, mode); \
+}
+
+#define PIN_INT_ARRAY() { \
+ if (isCriticalPinning) \
+ buffP = (jint*)ENVPTR->GetPrimitiveArrayCritical(ENVPAR buf, &isCopy); \
+ else \
+ buffP = ENVPTR->GetIntArrayElements(ENVPAR buf, &isCopy); \
+}
+
+#define UNPIN_INT_ARRAY(mode) { \
+ if (isCriticalPinning) \
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR buf, buffP, mode); \
+ else \
+ ENVPTR->ReleaseIntArrayElements(ENVPAR buf, buffP, mode); \
+}
+
+#define PIN_LONG_ARRAY() { \
+ if (isCriticalPinning) \
+ buffP = (jlong*)ENVPTR->GetPrimitiveArrayCritical(ENVPAR buf, &isCopy); \
+ else \
+ buffP = ENVPTR->GetLongArrayElements(ENVPAR buf,&isCopy); \
+}
+
+#define UNPIN_LONG_ARRAY(mode) { \
+ if (isCriticalPinning) \
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR buf, buffP, mode); \
+ else \
+ ENVPTR->ReleaseLongArrayElements(ENVPAR buf, buffP, mode); \
+}
+
+#define PIN_FLOAT_ARRAY() { \
+ if (isCriticalPinning) \
+ buffP = (jfloat*)ENVPTR->GetPrimitiveArrayCritical(ENVPAR buf, &isCopy); \
+ else \
+ buffP = ENVPTR->GetFloatArrayElements(ENVPAR buf, &isCopy); \
+}
+
+#define UNPIN_FLOAT_ARRAY(mode) { \
+ if (isCriticalPinning) \
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR buf, buffP, mode); \
+ else \
+ ENVPTR->ReleaseFloatArrayElements(ENVPAR buf, buffP, mode); \
+}
+
+#define PIN_DOUBLE_ARRAY() { \
+ if (isCriticalPinning) \
+ buffP = (jdouble*)ENVPTR->GetPrimitiveArrayCritical(ENVPAR buf, &isCopy); \
+ else \
+ buffP = ENVPTR->GetDoubleArrayElements(ENVPAR buf, &isCopy); \
+}
+
+#define UNPIN_DOUBLE_ARRAY(mode) { \
+ if (isCriticalPinning) \
+ ENVPTR->ReleasePrimitiveArrayCritical(ENVPAR buf, buffP, mode); \
+ else \
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR buf, buffP, mode); \
+}
+
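Every H5Dread_*/H5Dwrite_* wrapper in this file takes a trailing boolean (the Z in its JNI signature) that the macros above use to choose between GetPrimitiveArrayCritical and the per-type Get<Type>ArrayElements calls. A hedged Java-side illustration of the flag, assuming already-open dataset, datatype, dataspace, and transfer-property identifiers:

    byte[] data = new byte[4096];

    // false: copy-tolerant pinning (GetByteArrayElements / ReleaseByteArrayElements)
    H5.H5Dread(dset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, data, false);

    // true: critical pinning (GetPrimitiveArrayCritical); avoids a copy, but the
    // JVM may be restricted (e.g. GC held off) while the region is pinned, so it
    // is only appropriate for short native calls.
    H5.H5Dread(dset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, data, true);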
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dcreate
+ * Signature: (JLjava/lang/String;JJJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Dcreate(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong type_id,
+ jlong space_id, jlong create_plist_id)
+{
+ hid_t dset_id = -1;
+ const char *fileName;
+
+ PIN_JAVA_STRING(name, fileName, -1);
+
+ dset_id = H5Dcreate2((hid_t)loc_id, fileName, (hid_t)type_id, (hid_t)space_id, H5P_DEFAULT, (hid_t)create_plist_id, H5P_DEFAULT);
+
+ UNPIN_JAVA_STRING(name, fileName);
+
+ if (dset_id < 0)
+ h5libraryError(env);
+
+ return (jlong)dset_id;
+} /* end Java_hdf_hdf5lib_H5__1H5Dcreate */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dopen
+ * Signature: (JLjava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Dopen(JNIEnv *env, jclass clss, jlong loc_id, jstring name)
+{
+ hid_t dset_id = -1;
+ const char *fileName;
+
+ PIN_JAVA_STRING(name, fileName, -1);
+
+ dset_id = H5Dopen2((hid_t)loc_id, fileName, H5P_DEFAULT);
+
+ UNPIN_JAVA_STRING(name, fileName);
+ if (dset_id < 0)
+ h5libraryError(env);
+
+ return (jlong)dset_id;
+} /* end Java_hdf_hdf5lib_H5__1H5Dopen */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dget_space
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Dget_1space(JNIEnv *env, jclass clss, jlong dataset_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Dget_space((hid_t)dataset_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Dget_1space */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dget_type
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Dget_1type(JNIEnv *env, jclass clss, jlong dataset_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Dget_type((hid_t)dataset_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Dget_1type */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dget_create_plist
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Dget_1create_1plist(JNIEnv *env, jclass clss, jlong dataset_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Dget_create_plist((hid_t)dataset_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Dget_1create_1plist */
+
+static htri_t
+H5Tdetect_variable_str(hid_t tid) {
+ htri_t ret_val = 0;
+
+ if (H5Tget_class(tid) == H5T_COMPOUND) {
+ unsigned i;
+ unsigned nm = (unsigned)H5Tget_nmembers(tid);
+ for(i = 0; i < nm; i++) {
+ htri_t status = 0;
+ hid_t mtid = 0;
+ if((mtid = H5Tget_member_type(tid, i)) < 0)
+ return -1; /* exit immediately on error */
+ if((status = H5Tdetect_variable_str(mtid)) < 0)
+ return status; /* exit immediately on error */
+ ret_val |= status;
+ H5Tclose (mtid);
+ } /* end for */
+ } /* end if */
+ else
+ ret_val = H5Tis_variable_str(tid);
+
+ return ret_val;
+} /* end H5Tdetect_variable_str */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread
+ * Signature: (JJJJJ[BZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dread(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jbyteArray buf, jboolean isCriticalPinning)
+{
+ herr_t status;
+ jbyte *buffP;
+ jboolean isCopy;
+ htri_t data_class;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dread: buf is NULL");
+ } /* end if */
+ else if((data_class = H5Tdetect_class(mem_type_id, H5T_VLEN)) < 0) {
+ h5JNIFatalError(env, "H5Dread: H5Tdetect_class() failed");
+ } /* end else if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dread: buf does not support variable length type");
+ } /* end else if */
+ else {
+ /* recursively detect any vlen string in the type (compound, array, ...) */
+ if((data_class = H5Tdetect_variable_str(mem_type_id)) < 0) {
+ h5JNIFatalError(env, "H5Dread: H5Tdetect_variable_str() failed");
+ } /* end if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dread: buf does not support variable length type");
+ } /* end else if */
+ else {
+ PIN_BYTE_ARRAY();
+
+ if (buffP == NULL) {
+ h5JNIFatalError(env, "H5Dread: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+ (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+ if (status < 0) {
+ UNPIN_BYTE_ARRAY(JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ UNPIN_BYTE_ARRAY(0); /* update java buffer for return */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dread */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite
+ * Signature: (JJJJJ[BZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dwrite(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jbyteArray buf, jboolean isCriticalPinning)
+{
+ herr_t status;
+ jbyte *buffP;
+ jboolean isCopy;
+ htri_t data_class;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dwrite: buf is NULL");
+ } /* end if */
+ else if((data_class = H5Tdetect_class(mem_type_id, H5T_VLEN)) < 0) {
+ h5JNIFatalError(env, "H5Dwrite: H5Tdetect_class() failed");
+ } /* end else if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dwrite: buf does not support variable length type");
+ } /* end else if */
+ else {
+ /* recursively detect any vlen string in the type (compound, array, ...) */
+ if((data_class = H5Tdetect_variable_str(mem_type_id)) < 0) {
+ h5JNIFatalError(env, "H5Dwrite: H5Tdetect_variable_str() failed");
+ } /* end if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dwrite: buf does not support variable length type");
+ } /* end else if */
+ else {
+ PIN_BYTE_ARRAY();
+ if (buffP == NULL) {
+ h5JNIFatalError(env, "H5Dwrite: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+ (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+ UNPIN_BYTE_ARRAY(JNI_ABORT); /* no need to update buffer */
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dwrite */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5__1H5Dclose(JNIEnv *env, jclass clss, jlong dataset_id)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Dclose((hid_t)dataset_id);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Dclose */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dget_storage_size
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Dget_1storage_1size(JNIEnv *env, jclass clss, jlong dataset_id)
+{
+ hsize_t retVal = (hsize_t)-1;
+
+ if (dataset_id < 0) {
+ h5badArgument(env, "H5Dget_storage_size: not a dataset");
+ } /* end if */
+ else {
+ retVal = H5Dget_storage_size((hid_t)dataset_id);
+ } /* end else */
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Dget_1storage_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dvlen_reclaim
+ * Signature: (JJJ[B)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dvlen_1reclaim(JNIEnv *env, jclass clss, jlong type_id, jlong space_id,
+ jlong xfer_plist_id, jbyteArray buf)
+{
+ herr_t status = -1;
+ jbyte *byteP;
+ jboolean isCopy;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dvlen_reclaim: buf is NULL");
+ } /* end if */
+ else {
+ byteP = ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy);
+ if (byteP == NULL) {
+ h5JNIFatalError(env, "H5Dvlen_reclaim: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Dvlen_reclaim((hid_t)type_id, (hid_t)space_id, (hid_t)xfer_plist_id, byteP);
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR buf, byteP, JNI_ABORT);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dvlen_1reclaim */
+
+/*
+ ////////////////////////////////////////////////////////////////////
+ // //
+ // New APIs for reading data from the library. //
+ // Using H5Dread(..., Object buf) requires calls to //
+ // theArray.emptyBytes() and theArray.arrayify(buf), which //
+ // triples the actual memory needed by the data set. //
+ // The typed APIs below avoid those extra copies. //
+ // //
+ ////////////////////////////////////////////////////////////////////
+*/
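As the comment block above notes, going through the generic byte[] H5Dread and then converting with theArray.emptyBytes()/theArray.arrayify(buf) roughly triples the memory needed for a dataset; the typed wrappers that follow (H5Dread_short, H5Dread_int, H5Dread_long, ...) pin the primitive array directly. A hedged Java sketch using the int variant; the HDF5Constants names are assumed to be defined in the same generated style as the H5Z constants earlier in this patch:

    // Typed read: the int[] itself is pinned and handed to H5Dread(), so no
    // intermediate byte[] or per-element conversion is needed.
    int[] values = new int[1024];   // sized to the dataset extent (assumed known)
    H5.H5Dread_int(dset_id,
                   HDF5Constants.H5T_NATIVE_INT,   // assumed constant name
                   HDF5Constants.H5S_ALL,          // assumed constant name
                   HDF5Constants.H5S_ALL,
                   HDF5Constants.H5P_DEFAULT,      // assumed constant name
                   values, false);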
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_short
+ * Signature: (JJJJJ[SZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dread_1short(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jshortArray buf, jboolean isCriticalPinning)
+{
+ herr_t status = -1;
+ jshort *buffP;
+ jboolean isCopy;
+ htri_t data_class;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dread_short: buf is NULL");
+ } /* end if */
+ else if((data_class = H5Tdetect_class(mem_type_id, H5T_VLEN)) < 0) {
+ h5JNIFatalError(env, "H5Dread_short: H5Tdetect_class() failed");
+ } /* end else if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dread_short: buf does not support variable length type");
+ } /* end else if */
+ else {
+ /* recursively detect any vlen string in the type (compound, array, ...) */
+ if((data_class = H5Tdetect_variable_str(mem_type_id)) < 0) {
+ h5JNIFatalError(env, "H5Dread_short: H5Tdetect_variable_str() failed");
+ } /* end if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dread_short: buf does not support variable length type");
+ } /* end else if */
+ else {
+ PIN_SHORT_ARRAY();
+ if (buffP == NULL) {
+ h5JNIFatalError(env, "H5Dread_short: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+ (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+ if (status < 0) {
+ UNPIN_SHORT_ARRAY(JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ UNPIN_SHORT_ARRAY(0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dread_1short */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite_short
+ * Signature: (JJJJJ[SZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dwrite_1short(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jshortArray buf, jboolean isCriticalPinning)
+{
+ herr_t status = -1;
+ jshort *buffP;
+ jboolean isCopy;
+ htri_t data_class;
+
+ if (buf == NULL ) {
+ h5nullArgument(env, "H5Dwrite_short: buf is NULL");
+ } /* end if */
+ else if((data_class = H5Tdetect_class(mem_type_id, H5T_VLEN)) < 0) {
+ h5JNIFatalError(env, "H5Dwrite_short: H5Tdetect_class() failed");
+ } /* end else if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dwrite_short: buf does not support variable length type");
+ } /* end else if */
+ else {
+ /* recursively detect any vlen string in the type (compound, array, ...) */
+ if((data_class = H5Tdetect_variable_str(mem_type_id)) < 0) {
+ h5JNIFatalError(env, "H5Dwrite_short: H5Tdetect_variable_str() failed");
+ } /* end if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dwrite_short: buf does not support variable length type");
+ } /* end else if */
+ else {
+ PIN_SHORT_ARRAY();
+ if (buffP == NULL) {
+ h5JNIFatalError(env, "H5Dwrite_short: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+ (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+ UNPIN_SHORT_ARRAY(JNI_ABORT);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dwrite_1short */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_int
+ * Signature: (JJJJJ[IZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dread_1int(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jintArray buf, jboolean isCriticalPinning)
+{
+ herr_t status = -1;
+ jint *buffP;
+ jboolean isCopy;
+ htri_t data_class;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dread_int: buf is NULL");
+ } /* end if */
+ else if((data_class = H5Tdetect_class(mem_type_id, H5T_VLEN)) < 0) {
+ h5JNIFatalError(env, "H5Dread_int: H5Tdetect_class() failed");
+ } /* end else if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dread_int: buf does not support variable length type");
+ } /* end else if */
+ else {
+ /* recursive detect any vlen string in type (compound, array ...) */
+ if((data_class = H5Tdetect_variable_str(mem_type_id)) < 0) {
+ h5JNIFatalError(env, "H5Dread_int: H5Tdetect_variable_str() failed");
+ } /* end if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dread_int: buf does not support variable length type");
+ } /* end else if */
+ else {
+ PIN_INT_ARRAY();
+ if (buffP == NULL) {
+ h5JNIFatalError(env, "H5Dread_int: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+ (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+ if (status < 0) {
+ UNPIN_INT_ARRAY(JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ UNPIN_INT_ARRAY(0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dread_1int */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite_int
+ * Signature: (JJJJJ[IZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dwrite_1int(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jintArray buf, jboolean isCriticalPinning)
+{
+ herr_t status = -1;
+ jint *buffP;
+ jboolean isCopy;
+ htri_t data_class;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dwrite_int: buf is NULL");
+ } /* end if */
+ else if((data_class = H5Tdetect_class(mem_type_id, H5T_VLEN)) < 0) {
+ h5JNIFatalError(env, "H5Dwrite_int: H5Tdetect_class() failed");
+ } /* end else if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dwrite_int: buf does not support variable length type");
+ } /* end else if */
+ else {
+ /* recursive detect any vlen string in type (compound, array ...) */
+ if((data_class = H5Tdetect_variable_str(mem_type_id)) < 0) {
+ h5JNIFatalError(env, "H5Dwrite_int: H5Tdetect_variable_str() failed");
+ } /* end if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dwrite_int: buf does not support variable length type");
+ } /* end else if */
+ else {
+ PIN_INT_ARRAY();
+ if (buffP == NULL) {
+ h5JNIFatalError(env, "H5Dwrite_int: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+ (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+ UNPIN_INT_ARRAY(JNI_ABORT);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dwrite_1int */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_long
+ * Signature: (JJJJJ[JZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dread_1long(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jlongArray buf, jboolean isCriticalPinning)
+{
+ herr_t status = -1;
+ jlong *buffP;
+ jboolean isCopy;
+ htri_t data_class;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dread_long: buf is NULL");
+ } /* end if */
+ else if((data_class = H5Tdetect_class(mem_type_id, H5T_VLEN)) < 0) {
+ h5JNIFatalError(env, "H5Dread_long: H5Tdetect_class() failed");
+ } /* end else if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dread_long: buf does not support variable length type");
+ } /* end else if */
+ else {
+ /* recursive detect any vlen string in type (compound, array ...) */
+ if((data_class = H5Tdetect_variable_str(mem_type_id)) < 0) {
+ h5JNIFatalError(env, "H5Dread_long: H5Tdetect_variable_str() failed");
+ } /* end if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dread_long: buf does not support variable length type");
+ } /* end else if */
+ else {
+ PIN_LONG_ARRAY();
+ if (buffP == NULL) {
+ h5JNIFatalError(env, "H5Dread_long: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+ (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+ if (status < 0) {
+ UNPIN_LONG_ARRAY(JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ UNPIN_LONG_ARRAY(0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dread_1long */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite_long
+ * Signature: (JJJJJ[JZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dwrite_1long(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jlongArray buf, jboolean isCriticalPinning)
+{
+ herr_t status = -1;
+ jlong *buffP;
+ jboolean isCopy;
+ htri_t data_class;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dwrite_long: buf is NULL");
+ } /* end if */
+ else if((data_class = H5Tdetect_class(mem_type_id, H5T_VLEN)) < 0) {
+ h5JNIFatalError(env, "H5Dwrite_long: H5Tdetect_class() failed");
+ } /* end else if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dwrite_long: buf does not support variable length type");
+ } /* end else if */
+ else {
+ /* recursive detect any vlen string in type (compound, array ...) */
+ if((data_class = H5Tdetect_variable_str(mem_type_id)) < 0) {
+ h5JNIFatalError(env, "H5Dwrite_long: H5Tdetect_variable_str() failed");
+ } /* end if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dwrite_long: buf does not support variable length type");
+ } /* end else if */
+ else {
+ PIN_LONG_ARRAY();
+ if (buffP == NULL) {
+ h5JNIFatalError(env, "H5Dwrite_long: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+ (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+ UNPIN_LONG_ARRAY(JNI_ABORT);
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dwrite_1long */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_float
+ * Signature: (JJJJJ[FZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dread_1float(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jfloatArray buf, jboolean isCriticalPinning)
+{
+ herr_t status = -1;
+ jfloat *buffP;
+ jboolean isCopy;
+ htri_t data_class;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dread_float: buf is NULL");
+ } /* end if */
+ else if((data_class = H5Tdetect_class(mem_type_id, H5T_VLEN)) < 0) {
+ h5JNIFatalError(env, "H5Dread_float: H5Tdetect_class() failed");
+ } /* end else if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dread_float: buf does not support variable length type");
+ } /* end else if */
+ else {
+ /* recursive detect any vlen string in type (compound, array ...) */
+ if((data_class = H5Tdetect_variable_str(mem_type_id)) < 0) {
+ h5JNIFatalError(env, "H5Dread_float: H5Tdetect_variable_str() failed");
+ } /* end if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dread_float: buf does not support variable length type");
+ } /* end else if */
+ else {
+ PIN_FLOAT_ARRAY();
+ if (buffP == NULL) {
+ h5JNIFatalError(env, "H5Dread_float: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+ (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+ if (status < 0) {
+ UNPIN_FLOAT_ARRAY(JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ UNPIN_FLOAT_ARRAY(0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dread_1float */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite_float
+ * Signature: (JJJJJ[FZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dwrite_1float(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jfloatArray buf, jboolean isCriticalPinning)
+{
+ herr_t status = -1;
+ jfloat *buffP;
+ jboolean isCopy;
+ htri_t data_class;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dwrite_float: buf is NULL");
+ } /* end if */
+ else if((data_class = H5Tdetect_class(mem_type_id, H5T_VLEN)) < 0) {
+ h5JNIFatalError(env, "H5Dwrite_float: H5Tdetect_class() failed");
+ } /* end else if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dwrite_float: buf does not support variable length type");
+ } /* end else if */
+ else {
+ /* recursive detect any vlen string in type (compound, array ...) */
+ if((data_class = H5Tdetect_variable_str(mem_type_id)) < 0) {
+ h5JNIFatalError(env, "H5Dwrite_float: H5Tdetect_variable_str() failed");
+ } /* end if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dwrite_float: buf does not support variable length type");
+ } /* end else if */
+ else {
+ PIN_FLOAT_ARRAY();
+ if (buffP == NULL) {
+ h5JNIFatalError(env, "H5Dwrite_float: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+ (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+ UNPIN_FLOAT_ARRAY(JNI_ABORT);
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dwrite_1float */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_double
+ * Signature: (JJJJJ[DZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dread_1double(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jdoubleArray buf, jboolean isCriticalPinning)
+{
+ herr_t status = -1;
+ jdouble *buffP;
+ jboolean isCopy;
+ htri_t data_class;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dread_double: buf is NULL");
+ } /* end if */
+ else if((data_class = H5Tdetect_class(mem_type_id, H5T_VLEN)) < 0) {
+ h5JNIFatalError(env, "H5Dread_double: H5Tdetect_class() failed");
+ } /* end else if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dread_double: buf does not support variable length type");
+ } /* end else if */
+ else {
+ /* recursive detect any vlen string in type (compound, array ...) */
+ if((data_class = H5Tdetect_variable_str(mem_type_id)) < 0) {
+ h5JNIFatalError(env, "H5Dread_double: H5Tdetect_variable_str() failed");
+ } /* end if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dread_double: buf does not support variable length type");
+ } /* end else if */
+ else {
+ PIN_DOUBLE_ARRAY();
+ if (buffP == NULL) {
+ h5JNIFatalError(env, "H5Dread_double: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+ (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+ if (status < 0) {
+ UNPIN_DOUBLE_ARRAY(JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ UNPIN_DOUBLE_ARRAY(0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dread_1double */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite_double
+ * Signature: (JJJJJ[DZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dwrite_1double(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jdoubleArray buf, jboolean isCriticalPinning)
+{
+ herr_t status = -1;
+ jdouble *buffP;
+ jboolean isCopy;
+ htri_t data_class;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dwrite_double: buf is NULL");
+ } /* end if */
+ else if((data_class = H5Tdetect_class(mem_type_id, H5T_VLEN)) < 0) {
+ h5JNIFatalError(env, "H5Dwrite_double: H5Tdetect_class() failed");
+ } /* end else if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dwrite_double: buf does not support variable length type");
+ } /* end else if */
+ else {
+ /* recursive detect any vlen string in type (compound, array ...) */
+ if((data_class = H5Tdetect_variable_str(mem_type_id)) < 0) {
+ h5JNIFatalError(env, "H5Dwrite_double: H5Tdetect_variable_str() failed");
+ } /* end if */
+ else if(data_class == 1) {
+ h5badArgument(env, "H5Dwrite_double: buf does not support variable length type");
+ } /* end else if */
+ else {
+ PIN_DOUBLE_ARRAY();
+ if (buffP == NULL) {
+ h5JNIFatalError(env, "H5Dwrite_double: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+ (hid_t)file_space_id, (hid_t)xfer_plist_id, buffP);
+
+ UNPIN_DOUBLE_ARRAY(JNI_ABORT);
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dwrite_1double */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_string
+ * Signature: (JJJJJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dread_1string(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jobjectArray j_buf)
+{
+ herr_t status = -1;
+ char *c_buf;
+ char *cstr;
+ size_t str_len;
+ jsize i;
+ jsize n;
+ size_t pos;
+ jstring jstr;
+
+ c_buf = cstr = NULL;
+ if (j_buf == NULL) {
+ h5nullArgument(env, "H5Dread_string: buf is NULL");
+ } /* end if */
+ else if ((n = ENVPTR->GetArrayLength(ENVPAR j_buf)) <= 0) {
+ h5nullArgument(env, "H5Dread_string: buf length <= 0");
+ } /* end else if */
+ else if ((str_len = H5Tget_size((hid_t)mem_type_id)) <=0) {
+ h5libraryError(env);
+ } /* end else if */
+ else {
+ if ((cstr = (char*)HDmalloc(str_len + 1)) == NULL) {
+ h5JNIFatalError(env, "H5Dread_string: memory allocation failed.");
+ } /* end if */
+ else {
+ if ((c_buf = (char*)HDmalloc((size_t)n * str_len)) == NULL) {
+ if (cstr)
+ HDfree(cstr);
+ cstr = NULL;
+ h5JNIFatalError(env, "H5Dread_string: memory allocation failed.");
+ } /* end if */
+ else {
+ status = H5Dread((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+ (hid_t)file_space_id, (hid_t)xfer_plist_id, c_buf);
+
+ if (status < 0) {
+ if (cstr)
+ HDfree(cstr);
+ cstr = NULL;
+ if (c_buf)
+ HDfree(c_buf);
+ c_buf = NULL;
+ h5libraryError(env);
+ } /* end if */
+ else {
+ pos = 0;
+ for (i = 0; i < n; i++) {
+ HDmemcpy(cstr, c_buf+pos, str_len);
+ cstr[str_len] = '\0';
+ jstr = ENVPTR->NewStringUTF(ENVPAR cstr);
+ ENVPTR->SetObjectArrayElement(ENVPAR j_buf, i, jstr);
+ pos += str_len;
+ } /* end for */
+ } /* end else */
+
+ if (c_buf)
+ HDfree(c_buf);
+ } /* end else cbuf allocation*/
+
+ if (cstr)
+ HDfree(cstr);
+ } /* end else cstr allocation*/
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dread_1string */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite_string
+ * Signature: (JJJJJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dwrite_1string(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jobjectArray j_buf)
+{
+ herr_t status = -1;
+ char *c_buf;
+ jsize str_len;
+ jsize i;
+ jsize n;
+
+ if (j_buf == NULL) {
+ h5nullArgument(env, "H5Dwrite_string: buf is NULL");
+ } /* end if */
+ else if ((n = ENVPTR->GetArrayLength(ENVPAR j_buf)) <= 0) {
+ h5nullArgument(env, "H5Dwrite_string: buf length <= 0");
+ } /* end else if */
+ else if ((str_len = (jsize)H5Tget_size((hid_t)mem_type_id)) <=0) {
+ h5libraryError(env);
+ } /* end else if */
+ else {
+ if ((c_buf = (char*)HDmalloc((size_t)n * (size_t)str_len)) == NULL) {
+ h5JNIFatalError(env, "H5Dwrite_string: memory allocation failed.");
+ } /* end if */
+ else {
+ for (i = 0; i < n; i++) {
+ jstring obj = (jstring)ENVPTR->GetObjectArrayElement(ENVPAR (jobjectArray)j_buf, i);
+ if (obj != 0) {
+ jsize length = ENVPTR->GetStringUTFLength(ENVPAR obj);
+ const char *utf8 = ENVPTR->GetStringUTFChars(ENVPAR obj, 0);
+
+ if (utf8) {
+ strncpy(&c_buf[i * str_len], utf8, str_len);
+ } /* end if */
+
+ ENVPTR->ReleaseStringUTFChars(ENVPAR obj, utf8);
+ ENVPTR->DeleteLocalRef(ENVPAR obj);
+ } /* end if */
+ } /* end for */
+
+ status = H5Dwrite((hid_t)dataset_id, (hid_t)mem_type_id, (hid_t)mem_space_id,
+ (hid_t)file_space_id, (hid_t)xfer_plist_id, c_buf);
+
+ if (c_buf)
+ HDfree(c_buf);
+ c_buf = NULL;
+
+ if (status < 0) {
+ h5libraryError(env);
+ } /* end if */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dwrite_1string */
+
+/**
+ * Read VLEN data into an array of arrays.
+ * Object[] buf contains VL arrays of data points.
+ * Currently only variable-length data of atomic data types is handled.
+ */
+/* old version */
+
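+/*
+ * Illustrative sketch (assumed Java-side usage, not defined in this file):
+ * a variable-length string dataset is read into a caller-allocated String[],
+ * one element per selected point; the memory datatype must be a VL string.
+ *
+ *     String[] strs = new String[count];               // count is hypothetical
+ *     H5.H5Dread_VLStrings(dataset_id, memtype_id,
+ *                          HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ *                          HDF5Constants.H5P_DEFAULT, strs);
+ */
+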
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_VLStrings
+ * Signature: (JJJJJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dread_1VLStrings(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jobjectArray buf)
+{
+ herr_t status = -1;
+ htri_t isVlenStr=0;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dread_VLStrings: buf is NULL");
+ } /* end if */
+ else {
+ isVlenStr = H5Tis_variable_str((hid_t)mem_type_id);
+
+ if (isVlenStr) {
+ status = H5DreadVL_str(env, (hid_t)dataset_id, (hid_t)mem_type_id,
+ (hid_t)mem_space_id, (hid_t)file_space_id,
+ (hid_t)xfer_plist_id, buf);
+ } /* end if */
+ else
+ h5badArgument(env, "H5Dread_VLStrings: type is not variable length String");
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dread_1VLStrings */
+
+herr_t
+H5DreadVL_str(JNIEnv *env, hid_t did, hid_t tid, hid_t mem_sid, hid_t file_sid,
+              hid_t xfer_plist_id, jobjectArray buf)
+{
+ char **strs;
+ jstring jstr;
+ jint i;
+ jint n;
+ herr_t status = -1;
+
+ n = ENVPTR->GetArrayLength(ENVPAR buf);
+ strs =(char**)HDcalloc((size_t)n, sizeof(char*));
+
+ if (strs == NULL) {
+ h5JNIFatalError(env, "H5DreadVL_str: failed to allocate buff for read variable length strings");
+ } /* end if */
+ else {
+ status = H5Dread(did, tid, mem_sid, file_sid, xfer_plist_id, strs);
+
+ if (status < 0) {
+ H5Dvlen_reclaim(tid, mem_sid, xfer_plist_id, strs);
+ HDfree(strs);
+ h5JNIFatalError(env, "H5DreadVL_str: failed to read variable length strings");
+ } /* end if */
+ else {
+ for (i=0; i < n; i++) {
+ jstr = ENVPTR->NewStringUTF(ENVPAR strs[i]);
+ ENVPTR->SetObjectArrayElement(ENVPAR buf, i, jstr);
+ H5free_memory (strs[i]);
+ } /* end for */
+
+            /*
+                When a dataset with a large number of strings (e.g., 1,000,000) is read repeatedly,
+                H5Dvlen_reclaim() may crash on Windows because the Java GC cannot reclaim
+                memory quickly enough. Instead, "H5free_memory(strs[i])" above releases each
+                string individually as soon as it has been copied into the Java array.
+                H5Dvlen_reclaim(tid, mem_sid, xfer_plist_id, strs);
+            */
+
+ HDfree(strs);
+ } /* end else */
+ } /* end else */
+
+ return status;
+} /* end H5DreadVL_str */
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite_VLStrings
+ * Signature: (JJJJJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dwrite_1VLStrings(JNIEnv *env, jclass clss, jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jobjectArray buf)
+{
+ herr_t status = -1;
+ htri_t isVlenStr=0;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dwrite_VLStrings: buf is NULL");
+ } /* end if */
+ else {
+ isVlenStr = H5Tis_variable_str((hid_t)mem_type_id);
+
+ if (isVlenStr) {
+ status = H5DwriteVL_str(env, (hid_t)dataset_id, (hid_t)mem_type_id,
+ (hid_t)mem_space_id, (hid_t)file_space_id,
+ (hid_t)xfer_plist_id, buf);
+ } /* end if */
+ else
+ h5badArgument(env, "H5Dwrite_VLStrings: type is not variable length String");
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dwrite_1VLStrings */
+
+herr_t
+H5DwriteVL_str(JNIEnv *env, hid_t dataset_id, hid_t mem_type_id, hid_t mem_space_id,
+ hid_t file_space_id, hid_t xfer_plist_id, jobjectArray buf)
+{
+ herr_t status = -1;
+ char **wdata;
+ jsize size;
+ jsize i;
+
+ size = ENVPTR->GetArrayLength(ENVPAR (jarray) buf);
+ wdata = (char**)HDmalloc((size_t)size * sizeof (char*));
+
+ if (!wdata) {
+ h5JNIFatalError(env, "H5DwriteVL_string: cannot allocate buffer");
+ } /* end if */
+ else {
+ HDmemset(wdata, 0, (size_t)size * sizeof(char*));
+ for (i = 0; i < size; ++i) {
+ jstring obj = (jstring) ENVPTR->GetObjectArrayElement(ENVPAR (jobjectArray)buf, i);
+ if (obj != 0) {
+ jsize length = ENVPTR->GetStringUTFLength(ENVPAR obj);
+ const char *utf8 = ENVPTR->GetStringUTFChars(ENVPAR obj, 0);
+
+ if (utf8) {
+ wdata[i] = (char*)HDmalloc((size_t)length + 1);
+ if (wdata[i]) {
+ HDmemset(wdata[i], 0, (size_t)length + 1);
+ HDstrncpy(wdata[i], utf8, (size_t)length + 1);
+ } /* end if */
+ } /* end if */
+
+ ENVPTR->ReleaseStringUTFChars(ENVPAR obj, utf8);
+ ENVPTR->DeleteLocalRef(ENVPAR obj);
+ } /* end if */
+ } /* end for (i = 0; i < size; ++i) */
+
+ status = H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, wdata);
+
+ /* now free memory*/
+ for (i = 0; i < size; i++) {
+ if(wdata[i]) {
+ HDfree(wdata[i]);
+ } /* end if */
+ } /* end for */
+ HDfree(wdata);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return status;
+} /* end H5DwriteVL_str */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_reg_ref
+ * Signature: (JJJJJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dread_1reg_1ref (JNIEnv *env, jclass clss,
+ jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jobjectArray buf)
+{
+ herr_t status = -1;
+ h5str_t h5str;
+ size_t size;
+ hdset_reg_ref_t *ref_data;
+ jint i;
+ jint n;
+ jstring jstr;
+
+ hid_t region = -1;
+ hid_t did = (hid_t) dataset_id;
+ hid_t tid = (hid_t) mem_type_id;
+ hid_t mem_sid = (hid_t) mem_space_id;
+ hid_t file_sid = (hid_t) file_space_id;
+
+ n = ENVPTR->GetArrayLength(ENVPAR buf);
+ size = sizeof(hdset_reg_ref_t); /*H5Tget_size(tid);*/
+ ref_data = (hdset_reg_ref_t*)HDmalloc(size * (size_t)n);
+
+ if (ref_data == NULL) {
+ h5JNIFatalError(env, "H5Dread_reg_ref: failed to allocate buff for read");
+ return -1;
+ } /* end if */
+
+ status = H5Dread(did, tid, mem_sid, file_sid, xfer_plist_id, ref_data);
+
+ if (status < 0) {
+ HDfree(ref_data);
+ h5JNIFatalError(env, "H5Dread_reg_ref: failed to read data");
+ return -1;
+ } /* end if */
+
+ HDmemset(&h5str, 0, sizeof(h5str_t));
+ h5str_new(&h5str, 1024);
+ for (i=0; i<n; i++) {
+ h5str.s[0] = '\0';
+ h5str_sprintf(&h5str, did, tid, ref_data[i], 0);
+ jstr = ENVPTR->NewStringUTF(ENVPAR h5str.s);
+
+ ENVPTR->SetObjectArrayElement(ENVPAR buf, i, jstr);
+ } /* end for */
+
+ h5str_free(&h5str);
+ HDfree(ref_data);
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dread_1reg_1ref */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_reg_ref_data
+ * Signature: (JJJJJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dread_1reg_1ref_1data (JNIEnv *env, jclass clss,
+ jlong dataset_id, jlong mem_type_id, jlong mem_space_id,
+ jlong file_space_id, jlong xfer_plist_id, jobjectArray buf)
+{
+ herr_t status = -1;
+ h5str_t h5str;
+ size_t size;
+ hdset_reg_ref_t *ref_data;
+ jint i;
+ jint n;
+ jstring jstr;
+
+ hid_t region_obj;
+ H5S_sel_type region_type;
+
+ hid_t region = -1;
+ hid_t did = (hid_t) dataset_id;
+ hid_t tid = (hid_t) mem_type_id;
+ hid_t mem_sid = (hid_t) mem_space_id;
+ hid_t file_sid = (hid_t) file_space_id;
+
+ n = ENVPTR->GetArrayLength(ENVPAR buf);
+ size = sizeof(hdset_reg_ref_t); /*H5Tget_size(tid);*/
+ ref_data = (hdset_reg_ref_t*)HDmalloc(size * (size_t)n);
+
+ if (ref_data == NULL) {
+ h5JNIFatalError(env, "H5Dread_reg_ref_data: failed to allocate buff for read");
+ return -1;
+ } /* end if */
+
+ status = H5Dread(did, tid, mem_sid, file_sid, xfer_plist_id, ref_data);
+
+ if (status < 0) {
+ HDfree(ref_data);
+ h5JNIFatalError(env, "H5Dread_reg_ref_data: failed to read data");
+ return -1;
+ } /* end if */
+
+ HDmemset(&h5str, 0, sizeof(h5str_t));
+ h5str_new(&h5str, 1024);
+ for (i=0; i<n; i++) {
+ h5str.s[0] = '\0';
+
+        /* dereference the region reference and dump the selected points or blocks as text */
+ region_obj = H5Rdereference2(did, H5P_DEFAULT, H5R_DATASET_REGION, ref_data[i]);
+ if (region_obj >= 0) {
+ region = H5Rget_region(did, H5R_DATASET_REGION, ref_data[i]);
+ if (region >= 0) {
+ region_type = H5Sget_select_type(region);
+ if(region_type==H5S_SEL_POINTS) {
+ h5str_dump_region_points_data(&h5str, region, region_obj);
+ } /* end if */
+ else {
+ h5str_dump_region_blocks_data(&h5str, region, region_obj);
+ } /* end else */
+
+ H5Sclose(region);
+ } /* end if */
+ H5Dclose(region_obj);
+ } /* end if */
+ jstr = ENVPTR->NewStringUTF(ENVPAR h5str.s);
+
+ ENVPTR->SetObjectArrayElement(ENVPAR buf, i, jstr);
+ } /* end for */
+
+ h5str_free(&h5str);
+ HDfree(ref_data);
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Dread_1reg_1ref_1data */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dcreate2
+ * Signature: (JLjava/lang/String;JJJJJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Dcreate2(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong type_id,
+ jlong space_id, jlong link_plist_id, jlong create_plist_id, jlong access_plist_id)
+{
+ hid_t dset_id = -1;
+ const char *fileName;
+
+ PIN_JAVA_STRING(name, fileName, -1);
+
+ dset_id = H5Dcreate2((hid_t)loc_id, fileName, (hid_t)type_id, (hid_t)space_id, (hid_t)link_plist_id, (hid_t)create_plist_id, (hid_t)access_plist_id);
+
+ UNPIN_JAVA_STRING(name, fileName);
+ if (dset_id < 0)
+ h5libraryError(env);
+
+ return (jlong)dset_id;
+} /* end Java_hdf_hdf5lib_H5__1H5Dcreate2 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dopen2
+ * Signature: (JLjava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Dopen2(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong access_plist)
+{
+ hid_t dset_id;
+ const char *fileName;
+
+ PIN_JAVA_STRING(name, fileName, -1);
+
+ dset_id = H5Dopen2((hid_t)loc_id, fileName, (hid_t)access_plist);
+
+ UNPIN_JAVA_STRING(name, fileName);
+ if (dset_id < 0)
+ h5libraryError(env);
+
+ return (jlong)dset_id;
+} /* end Java_hdf_hdf5lib_H5__1H5Dopen2 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dcreate_anon
+ * Signature: (JJJJJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Dcreate_1anon(JNIEnv *env, jclass clss, jlong loc_id, jlong type_id, jlong space_id, jlong dcpl_id, jlong dapl_id)
+{
+ hid_t dset_id = -1;
+
+ dset_id = H5Dcreate_anon((hid_t)loc_id, (hid_t)type_id, (hid_t)space_id, (hid_t)dcpl_id, (hid_t)dapl_id);
+ if (dset_id < 0)
+ h5libraryError(env);
+
+ return (jlong)dset_id;
+} /* end Java_hdf_hdf5lib_H5__1H5Dcreate_1anon */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dget_space_status
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dget_1space_1status(JNIEnv *env, jclass clss, jlong loc_id)
+{
+ H5D_space_status_t space_status = H5D_SPACE_STATUS_ERROR;
+
+ if (H5Dget_space_status((hid_t)loc_id, &space_status) < 0)
+ h5libraryError(env);
+
+ return (jint)space_status;
+} /* end Java_hdf_hdf5lib_H5_H5Dget_1space_1status */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dget_access_plist
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Dget_1access_1plist(JNIEnv *env, jclass clss, jlong loc_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Dget_access_plist((hid_t)loc_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Dget_1access_1plist */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dget_offset
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Dget_1offset(JNIEnv *env, jclass clss, jlong loc_id)
+{
+ haddr_t offset = HADDR_UNDEF;
+
+ offset = H5Dget_offset((hid_t)loc_id);
+ if (offset == HADDR_UNDEF)
+ h5libraryError(env);
+
+ return (jlong)offset;
+} /* end Java_hdf_hdf5lib_H5_H5Dget_1offset */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dvlen_get_buf_size
+ * Signature: (JJJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Dvlen_1get_1buf_1size(JNIEnv *env, jclass clss, jlong dataset_id, jlong type_id, jlong space_id)
+{
+ hsize_t sz = 0;
+
+ if (H5Dvlen_get_buf_size((hid_t)dataset_id, (hid_t)type_id, (hid_t)space_id, &sz) < 0)
+ h5libraryError(env);
+
+ return (jlong)sz;
+} /* end Java_hdf_hdf5lib_H5_H5Dvlen_1get_1buf_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dfill
+ * Signature: ([BJ[BJJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Dfill(JNIEnv *env, jclass clss, jbyteArray fill, jlong fill_type_id, jbyteArray buf, jlong buf_type_id, jlong space_id)
+{
+ herr_t status;
+ jbyte *fillP;
+ jbyte *buffP;
+ jboolean isCopy1;
+ jboolean isCopy2;
+
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dfill: buf is NULL");
+ return;
+ } /* end if */
+ buffP = ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy2);
+ if (buffP == NULL) {
+ h5JNIFatalError(env, "H5Dfill: buf not pinned");
+ return;
+ } /* end if */
+
+ if(fill) {
+ fillP = ENVPTR->GetByteArrayElements(ENVPAR fill, &isCopy1);
+ if (fillP == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR buf, buffP, JNI_ABORT);
+ h5JNIFatalError( env, "H5Dfill: fill not pinned");
+ return;
+ } /* end if */
+ } /* end if */
+ else
+ fillP = NULL;
+
+ status = H5Dfill((const void*)fillP, (hid_t)fill_type_id, (void*)buffP, (hid_t)buf_type_id, (hid_t)space_id);
+    if(fillP) {
+        /* free the fill buffer without copying back */
+        ENVPTR->ReleaseByteArrayElements(ENVPAR fill, fillP, JNI_ABORT);
+    } /* end if */
+ if (status < 0) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR buf, buffP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ if (isCopy2 == JNI_TRUE) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR buf, buffP, 0);
+ } /* end if */
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Dfill */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dset_extent
+ * Signature: (J[J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Dset_1extent(JNIEnv *env, jclass clss, jlong loc_id, jlongArray buf)
+{
+ herr_t status;
+ hsize_t *dims;
+ jlong *buffP;
+ jsize rank;
+ jboolean isCopy;
+ int i = 0;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Dset_extent: buf is NULL");
+ return;
+ } /* end if */
+
+ rank = ENVPTR->GetArrayLength(ENVPAR buf);
+ if (rank <= 0) {
+ h5JNIFatalError(env, "H5Dset_extent: rank <=0");
+ } /* end if */
+ else {
+ buffP = ENVPTR->GetLongArrayElements(ENVPAR buf, &isCopy);
+ if (buffP == NULL) {
+ h5JNIFatalError( env, "H5Dset_extent: buf not pinned");
+ } /* end if */
+ else {
+ dims = (hsize_t*)HDmalloc((size_t)rank * sizeof(hsize_t));
+ for (i = 0; i< rank; i++)
+ dims[i] = (hsize_t)buffP[i];
+
+ status = H5Dset_extent((hid_t)loc_id, (hsize_t*)dims);
+
+ HDfree (dims);
+
+ /* free the buffer without copying back */
+ ENVPTR->ReleaseLongArrayElements(ENVPAR buf, buffP, JNI_ABORT);
+
+ if (status < 0) {
+ h5libraryError(env);
+ } /* end if */
+ } /* end else */
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Dset_1extent */
+
+static herr_t
+H5D_iterate_cb(void* elem, hid_t elem_id, unsigned ndim, const hsize_t *point, void *op_data) {
+ JNIEnv *cbenv;
+ jint status;
+ jclass cls;
+ jmethodID mid;
+ jbyteArray elemArray;
+ jlongArray pointArray;
+ jsize size;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) != 0) {
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+ cls = CBENVPTR->GetObjectClass(CBENVPAR visit_callback);
+ if (cls == 0) {
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "([BJI[JLhdf/hdf5lib/callbacks/H5D_iterate_t;)I");
+ if (mid == 0) {
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+
+ if (elem == NULL) {
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+ if (point == NULL) {
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+
+ size = (jsize)H5Tget_size(elem_id);
+ elemArray = CBENVPTR->NewByteArray(CBENVPAR size);
+ if (elemArray == NULL) {
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+ CBENVPTR->SetByteArrayRegion(CBENVPAR elemArray, 0, size, (jbyte *)elem);
+
+ pointArray = CBENVPTR->NewLongArray(CBENVPAR 2);
+ if (pointArray == NULL) {
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+ CBENVPTR->SetLongArrayRegion(CBENVPAR pointArray, 0, 2, (const jlong *)point);
+
+ status = CBENVPTR->CallIntMethod(CBENVPAR visit_callback, mid, (void*)elemArray, elem_id, ndim, pointArray, op_data);
+
+ CBENVPTR->GetByteArrayRegion(CBENVPAR elemArray, 0, size, (jbyte *)elem);
+
+ JVMPTR->DetachCurrentThread(JVMPAR);
+
+ return status;
+} /* end H5D_iterate_cb */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Diterate
+ * Signature: ([BJJLjava/lang/Object;Ljava/lang/Object;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Diterate(JNIEnv *env, jclass clss, jbyteArray buf, jlong buf_type, jlong space,
+ jobject callback_op, jobject op_data)
+{
+ herr_t status = -1;
+ jboolean isCopy;
+ jbyte *buffP;
+
+ ENVPTR->GetJavaVM(ENVPAR &jvm);
+ visit_callback = callback_op;
+
+ if (op_data == NULL) {
+ h5nullArgument(env, "H5Diterate: op_data is NULL");
+ return -1;
+ } /* end if */
+ if (callback_op == NULL) {
+ h5nullArgument(env, "H5Diterate: callback_op is NULL");
+ return -1;
+ } /* end if */
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Diterate: buf is NULL");
+ return -1;
+ } /* end if */
+ buffP = ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy);
+ if (buffP == NULL) {
+ h5JNIFatalError(env, "H5Diterate: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Diterate((void*)buffP, (hid_t)buf_type, (hid_t)space, (H5D_operator_t)H5D_iterate_cb, (void*)op_data);
+
+ if (status < 0) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR buf, buffP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ if (isCopy == JNI_TRUE) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR buf, buffP, 0);
+ } /* end if */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Diterate */
+
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
diff --git a/java/src/jni/h5dImp.h b/java/src/jni/h5dImp.h
new file mode 100644
index 0000000..12078e8
--- /dev/null
+++ b/java/src/jni/h5dImp.h
@@ -0,0 +1,319 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5D */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5D
+#define _Included_hdf_hdf5lib_H5_H5D
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dcreate
+ * Signature: (JLjava/lang/String;JJJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Dcreate
+ (JNIEnv*, jclass, jlong, jstring, jlong, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dopen
+ * Signature: (JLjava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Dopen
+ (JNIEnv*, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dget_space
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Dget_1space
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dget_type
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Dget_1type
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dget_create_plist
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Dget_1create_1plist
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread
+ * Signature: (JJJJJ[BZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dread
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jbyteArray, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite
+ * Signature: (JJJJJ[BZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dwrite
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jbyteArray, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5__1H5Dclose
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dget_storage_size
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Dget_1storage_1size
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dvlen_reclaim
+ * Signature: (JJJ[B)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dvlen_1reclaim
+ (JNIEnv*, jclass, jlong, jlong, jlong, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_short
+ * Signature: (JJJJJ[SZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dread_1short
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jshortArray, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite_short
+ * Signature: (JJJJJ[SZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dwrite_1short
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jshortArray, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_int
+ * Signature: (JJJJJ[IZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dread_1int
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jintArray, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite_int
+ * Signature: (JJJJJ[IZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dwrite_1int
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jintArray, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_long
+ * Signature: (JJJJJ[JZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dread_1long
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jlongArray, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite_long
+ * Signature: (JJJJJ[JZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dwrite_1long
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jlongArray, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_float
+ * Signature: (JJJJJ[FZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dread_1float
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jfloatArray, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite_float
+ * Signature: (JJJJJ[FZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dwrite_1float
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jfloatArray, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_double
+ * Signature: (JJJJJ[DZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dread_1double
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jdoubleArray, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite_double
+ * Signature: (JJJJJ[DZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dwrite_1double
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jdoubleArray, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_string
+ * Signature: (JJJJJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dread_1string
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jobjectArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite_string
+ * Signature: (JJJJJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dwrite_1string
+  (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jobjectArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_VLStrings
+ * Signature: (JJJJJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dread_1VLStrings
+  (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jobjectArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dwrite_VLStrings
+ * Signature: (JJJJJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dwrite_1VLStrings
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jobjectArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_reg_ref
+ * Signature: (JJJJJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dread_1reg_1ref
+  (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jobjectArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dread_reg_ref_data
+ * Signature: (JJJJJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dread_1reg_1ref_1data
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong, jobjectArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dcreate2
+ * Signature: (JLjava/lang/String;JJJJJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Dcreate2
+ (JNIEnv*, jclass, jlong, jstring, jlong, jlong, jlong, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dopen2
+ * Signature: (JLjava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Dopen2
+ (JNIEnv*, jclass, jlong, jstring, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Dcreate_anon
+ * Signature: (JJJJJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Dcreate_1anon
+ (JNIEnv*, jclass, jlong, jlong, jlong, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dget_space_status
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dget_1space_1status
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dget_access_plist
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Dget_1access_1plist
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dget_offset
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Dget_1offset
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dvlen_get_buf_size
+ * Signature: (JJJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Dvlen_1get_1buf_1size
+ (JNIEnv*, jclass, jlong, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dfill
+ * Signature: ([BJ[BJJ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Dfill
+ (JNIEnv*, jclass, jbyteArray, jlong, jbyteArray, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dset_extent
+ * Signature: (J[J)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Dset_1extent
+ (JNIEnv*, jclass, jlong, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Diterate
+ * Signature: ([BJJLjava/lang/Object;Ljava/lang/Object;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Diterate
+ (JNIEnv*, jclass, jbyteArray, jlong, jlong, jobject, jobject);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5D */
diff --git a/java/src/jni/h5eImp.c b/java/src/jni/h5eImp.c
new file mode 100644
index 0000000..f0c4135
--- /dev/null
+++ b/java/src/jni/h5eImp.c
@@ -0,0 +1,524 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ * http://www.hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/*
+ * This code is the C-interface called by Java programs to access the
+ * general library functions of the HDF5 library.
+ *
+ * Each routine wraps a single HDF entry point, generally with the
+ * analogous arguments and return codes.
+ *
+ * For details of the HDF libraries, see the HDF Documentation at:
+ * http://www.hdfgroup.org/HDF5/doc/
+ *
+ */
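+
+/*
+ * Illustrative sketch (assumed Java-side usage): these wrappers are driven
+ * from hdf.hdf5lib.H5, e.g. to snapshot and inspect the current error stack
+ * using the natives implemented below:
+ *
+ *     long stack_id = H5.H5Eget_current_stack();   // copy of the current stack
+ *     long nerrs    = H5.H5Eget_num(stack_id);     // number of error records
+ *     H5.H5Eclose_stack(stack_id);                 // release the copy
+ */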
+
+#include <jni.h>
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5jni.h"
+#include "h5eImp.h"
+
+extern JavaVM *jvm;
+extern jobject visit_callback;
+
+#ifdef __cplusplus
+ #define CBENVPTR (cbenv)
+ #define CBENVPAR
+ #define JVMPTR (jvm)
+ #define JVMPAR
+ #define JVMPAR2
+#else /* __cplusplus */
+ #define CBENVPTR (*cbenv)
+ #define CBENVPAR cbenv,
+ #define JVMPTR (*jvm)
+ #define JVMPAR jvm
+ #define JVMPAR2 jvm,
+#endif /* __cplusplus */
+
+/********************/
+/* Local Prototypes */
+/********************/
+
+static herr_t H5E_walk_cb(int nindx, const H5E_error2_t *info, void *op_data);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eauto_is_v2
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Eauto_1is_1v2(JNIEnv *env, jclass cls, jlong stk_id)
+{
+ unsigned int is_stack = 0;
+
+ if (stk_id < 0) {
+ h5badArgument(env, "H5Eauto_is_v2: invalid argument");
+ } /* end if */
+ else if (H5Eauto_is_v2((hid_t)stk_id, &is_stack) < 0)
+ h5libraryError(env);
+
+ return (jboolean)is_stack;
+} /* end Java_hdf_hdf5lib_H5_H5Eauto_1is_1v2 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eregister_class
+ * Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Eregister_1class(JNIEnv *env, jclass cls, jstring cls_name, jstring lib_name, jstring version)
+{
+ hid_t ret_val = -1;
+ const char* the_cls_name;
+ const char* the_lib_name;
+ const char* the_version;
+
+ PIN_JAVA_STRING_THREE(cls_name, the_cls_name, lib_name, the_lib_name, version, the_version, -1);
+
+ ret_val = H5Eregister_class(the_cls_name, the_lib_name, the_version);
+
+ UNPIN_JAVA_STRING_THREE(cls_name, the_cls_name, lib_name, the_lib_name, version, the_version);
+
+ if (ret_val < 0)
+ h5libraryError(env);
+
+ return (jlong)ret_val;
+} /* end Java_hdf_hdf5lib_H5_H5Eregister_1class */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eunregister_class
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Eunregister_1class(JNIEnv *env, jclass cls, jlong cls_id)
+{
+ if (cls_id < 0) {
+ h5badArgument(env, "H5Eunregister_class: invalid argument");
+ } /* end if */
+ else if (H5Eunregister_class((hid_t)cls_id) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Eunregister_1class */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eclose_msg
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Eclose_1msg(JNIEnv *env, jclass cls, jlong err_id)
+{
+ if (err_id < 0) {
+ h5badArgument(env, "H5Eclose_msg: invalid argument");
+ } /* end if */
+ else if (H5Eclose_msg((hid_t)err_id) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Eclose_1msg */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ecreate_msg
+ * Signature: (JILjava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Ecreate_1msg(JNIEnv *env, jclass cls, jlong err_id, jint msg_type, jstring err_msg)
+{
+ hid_t ret_val = -1;
+ const char *the_err_msg;
+ H5E_type_t error_msg_type = (H5E_type_t)msg_type;
+
+ if (err_id < 0) {
+ h5badArgument(env, "H5Ecreate_msg: invalid argument");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(err_msg, the_err_msg, -1);
+
+ ret_val = H5Ecreate_msg((hid_t)err_id, error_msg_type, the_err_msg);
+
+ UNPIN_JAVA_STRING(err_msg, the_err_msg);
+
+ if (ret_val < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return (jlong)ret_val;
+} /* end Java_hdf_hdf5lib_H5_H5Ecreate_1msg */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ecreate_stack
+ * Signature: ()J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Ecreate_1stack(JNIEnv *env, jclass cls)
+{
+ hid_t ret_val = -1;
+
+ ret_val = H5Ecreate_stack();
+ if (ret_val < 0)
+ h5libraryError(env);
+
+ return (jlong)ret_val;
+} /* end Java_hdf_hdf5lib_H5_H5Ecreate_1stack */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eget_current_stack
+ * Signature: ()J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Eget_1current_1stack(JNIEnv *env, jclass cls)
+{
+ hid_t ret_val = -1;
+
+ ret_val = H5Eget_current_stack();
+ if (ret_val < 0)
+ h5libraryError(env);
+
+ return (jlong)ret_val;
+} /* end Java_hdf_hdf5lib_H5_H5Eget_1current_1stack */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eclose_stack
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Eclose_1stack(JNIEnv *env, jclass cls, jlong stk_id)
+{
+ if (stk_id < 0) {
+ h5badArgument(env, "H5Eclose_stack: invalid argument");
+ } /* end if */
+ else if (H5Eclose_stack((hid_t)stk_id) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Eclose_1stack */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eprint2
+ * Signature: (JLjava/lang/Object;)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Eprint2(JNIEnv *env, jclass cls, jlong stk_id, jobject stream_obj)
+{
+ herr_t ret_val = -1;
+
+ if (stk_id < 0) {
+ h5badArgument(env, "H5Eprint2: invalid argument");
+ } /* end if */
+ else {
+ if(!stream_obj)
+ ret_val = H5Eprint2((hid_t)stk_id, stdout);
+ else
+ ret_val = H5Eprint2((hid_t)stk_id, (FILE*)stream_obj);
+
+ if (ret_val < 0)
+ h5libraryError(env);
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Eprint2 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eget_class_name
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_hdf_hdf5lib_H5_H5Eget_1class_1name(JNIEnv *env, jclass cls, jlong cls_id)
+{
+ char *namePtr;
+ jstring str = NULL;
+ ssize_t buf_size;
+
+ if (cls_id < 0) {
+ h5badArgument(env, "H5Eget_class_name: invalid argument");
+ } /* end if */
+ else {
+ /* get the length of the name */
+ buf_size = H5Eget_class_name((hid_t)cls_id, NULL, 0);
+
+ if (buf_size < 0) {
+ h5badArgument( env, "H5Eget_class_name: buf_size < 0");
+ } /* end if */
+ else if (buf_size == 0) {
+ h5badArgument( env, "H5Eget_class_name: No class name");
+ } /* end else if */
+ else {
+ buf_size++; /* add extra space for the null terminator */
+ namePtr = (char*)HDmalloc(sizeof(char) * (size_t)buf_size);
+ if (namePtr == NULL) {
+ h5outOfMemory( env, "H5Eget_class_name: malloc failed");
+ } /* end if */
+ else {
+ buf_size = H5Eget_class_name((hid_t)cls_id, (char *)namePtr, (size_t)buf_size);
+
+ if (buf_size < 0) {
+ HDfree(namePtr);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR namePtr);
+ HDfree(namePtr);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ return str;
+} /* end Java_hdf_hdf5lib_H5_H5Eget_1class_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eset_current_stack
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Eset_1current_1stack(JNIEnv *env, jclass cls, jlong stk_id)
+{
+ if (stk_id < 0) {
+ h5badArgument(env, "H5Eset_current_stack: invalid argument");
+ } /* end if */
+ else if (H5Eset_current_stack((hid_t)stk_id) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Eset_1current_1stack */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Epop
+ * Signature: (JJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Epop(JNIEnv *env, jclass cls, jlong stk_id, jlong count)
+{
+ if (stk_id < 0) {
+ h5badArgument(env, "H5Epop: invalid argument");
+ } /* end if */
+ else if (H5Epop((hid_t)stk_id, (size_t)count) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Epop */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Epush2
+ * Signature: (JLjava/lang/String;Ljava/lang/String;IJJJLjava/lang/String;)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Epush2(JNIEnv *env, jclass cls, jlong stk_id, jstring filename, jstring funcname,
+ jint linenumber, jlong class_id, jlong major_id, jlong minor_id, jstring err_desc)
+{
+ herr_t ret_val = -1;
+ const char* fName;
+ const char* fncName;
+ const char* errMsg;
+
+ if (stk_id < 0) {
+ h5badArgument(env, "H5Epush: invalid argument");
+ } /* end if */
+ else if (class_id < 0) {
+ h5badArgument(env, "H5Epush: invalid class_id argument");
+ } /* end else if */
+ else if (major_id < 0) {
+ h5badArgument(env, "H5Epush: invalid major_id argument");
+ } /* end else if */
+ else if (minor_id < 0) {
+ h5badArgument(env, "H5Epush: invalid minor_id argument");
+ } /* end else if */
+ else {
+ PIN_JAVA_STRING_THREE0(filename, fName, funcname, fncName, err_desc, errMsg);
+
+ ret_val = H5Epush2((hid_t)stk_id, fName, fncName, (unsigned)linenumber, (hid_t)class_id,
+ (hid_t)major_id, (hid_t)minor_id, errMsg);
+
+ UNPIN_JAVA_STRING_THREE(filename, fName, funcname, fncName, err_desc, errMsg);
+
+ if (ret_val < 0)
+ h5libraryError(env);
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Epush2 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eclear2
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Eclear2(JNIEnv *env, jclass cls, jlong stk_id)
+{
+ if (stk_id < 0) {
+ h5badArgument(env, "H5Eclear2: invalid argument");
+ } /* end if */
+ else if (H5Eclear2((hid_t)stk_id) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Eclear2 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eget_msg
+ * Signature: (J[I)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_hdf_hdf5lib_H5_H5Eget_1msg(JNIEnv *env, jclass cls, jlong msg_id, jintArray error_msg_type_list)
+{
+ char *namePtr;
+ jstring str = NULL;
+ jboolean isCopy;
+ ssize_t buf_size;
+ jint *theArray;
+ H5E_type_t error_msg_type;
+
+ if (msg_id < 0) {
+ h5badArgument(env, "H5Eget_msg: invalid argument");
+ } /* end if */
+ else if (error_msg_type_list == NULL) {
+ h5nullArgument(env, "H5Eget_msg: error_msg_type_list is NULL");
+    } /* end else if */
+ else {
+ /* get the length of the name */
+ buf_size = H5Eget_msg((hid_t)msg_id, NULL, NULL, 0);
+
+ if ((buf_size < 0) || (buf_size == 0)) {
+ h5JNIFatalError(env, "H5Eget_msg: Invalid message");
+ } /* end if */
+ else {
+ buf_size++; /* add extra space for the null terminator */
+ namePtr = (char*)HDmalloc(sizeof(char) * (size_t)buf_size);
+ if (namePtr == NULL) {
+ h5outOfMemory(env, "H5Eget_msg: malloc failed");
+ } /* end if */
+ else {
+ theArray = (jint*)ENVPTR->GetIntArrayElements(ENVPAR error_msg_type_list, &isCopy);
+ if (theArray == NULL) {
+ HDfree(namePtr);
+ h5JNIFatalError(env, "H5Eget_msg: error_msg_type_list not pinned");
+ } /* end if */
+ else {
+ buf_size = H5Eget_msg((hid_t)msg_id, &error_msg_type, (char *)namePtr, (size_t)buf_size);
+
+ if (buf_size < 0) {
+ HDfree(namePtr);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR error_msg_type_list, theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ theArray[0] = error_msg_type;
+ ENVPTR->ReleaseIntArrayElements(ENVPAR error_msg_type_list, theArray, 0);
+
+ str = ENVPTR->NewStringUTF(ENVPAR namePtr);
+ HDfree(namePtr);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return str;
+} /* end Java_hdf_hdf5lib_H5_H5Eget_1msg */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eget_num
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Eget_1num(JNIEnv *env, jclass cls, jlong stk_id)
+{
+ ssize_t ret_val = -1;
+
+ if (stk_id < 0) {
+ h5badArgument(env, "H5Eget_num: invalid argument");
+ } /* end if */
+ else {
+ ret_val = H5Eget_num((hid_t)stk_id);
+ if (ret_val < 0)
+ h5libraryError(env);
+ } /* end else */
+ return (jlong)ret_val;
+} /* end Java_hdf_hdf5lib_H5_H5Eget_1num */
+
+static herr_t
+H5E_walk_cb(int nindx, const H5E_error2_t *info, void *op_data)
+{
+ JNIEnv *cbenv;
+ jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jstring str1, str2, str3;
+ jmethodID constructor;
+ jvalue args[7];
+ jobject cb_info_t = NULL;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) == 0) {
+ cls = CBENVPTR->GetObjectClass(CBENVPAR visit_callback);
+ if (cls != 0) {
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(ILhdf/hdf5lib/structs/H5E_error2_t;Lhdf/hdf5lib/callbacks/H5E_walk_t;)I");
+ if (mid != 0) {
+ args[0].j = info->cls_id;
+ args[1].j = info->maj_num;
+ args[2].j = info->min_num;
+ args[3].i = (jint)info->line;
+ str1 = CBENVPTR->NewStringUTF(CBENVPAR info->func_name);
+ args[4].l = str1;
+ str2 = CBENVPTR->NewStringUTF(CBENVPAR info->file_name);
+ args[5].l = str2;
+ str3 = CBENVPTR->NewStringUTF(CBENVPAR info->desc);
+ args[6].l = str3;
+                /* get a reference to the H5E_error2_t class */
+ cls = CBENVPTR->FindClass(CBENVPAR "hdf/hdf5lib/structs/H5E_error2_t");
+ if (cls != 0) {
+                    /* get a reference to the constructor; the JNI name of a constructor is <init> */
+ constructor = CBENVPTR->GetMethodID(CBENVPAR cls, "<init>", "(JJJILjava/lang/String;Ljava/lang/String;Ljava/lang/String;)V");
+ if (constructor != 0) {
+ cb_info_t = CBENVPTR->NewObjectA(CBENVPAR cls, constructor, args);
+
+ status = CBENVPTR->CallIntMethod(CBENVPAR visit_callback, mid, nindx, cb_info_t, op_data);
+ } /* end if (constructor != 0) */
+ } /* end if(cls != 0) */
+ } /* end if (mid != 0) */
+ } /* end if (cls != 0) */
+ } /* end if */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return (herr_t)status;
+} /* end H5E_walk_cb */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ewalk2
+ * Signature: (JJLjava/lang/Object;Ljava/lang/Object;)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Ewalk2(JNIEnv *env, jclass cls, jlong stk_id, jlong direction, jobject callback_op, jobject op_data)
+{
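+    /* Cache the JavaVM and the caller's callback object in globals so that
+     * H5E_walk_cb (above) can re-enter Java for every record on the stack. */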
+ ENVPTR->GetJavaVM(ENVPAR &jvm);
+ visit_callback = callback_op;
+
+ if ((op_data == NULL) || (callback_op == NULL)) {
+ h5nullArgument(env, "H5Ewalk2: op_data or callback_op is NULL");
+ } /* end if */
+    else if (H5Ewalk2((hid_t)stk_id, (H5E_direction_t)direction, (H5E_walk2_t)H5E_walk_cb, (void*)op_data) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Ewalk2 */
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
diff --git a/java/src/jni/h5eImp.h b/java/src/jni/h5eImp.h
new file mode 100644
index 0000000..c40c013
--- /dev/null
+++ b/java/src/jni/h5eImp.h
@@ -0,0 +1,167 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5E */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5E
+#define _Included_hdf_hdf5lib_H5_H5E
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eauto_is_v2
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Eauto_1is_1v2
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eregister_class
+ * Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Eregister_1class
+ (JNIEnv *, jclass, jstring, jstring, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eunregister_class
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Eunregister_1class
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eclose_msg
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Eclose_1msg
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ecreate_msg
+ * Signature: (JILjava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Ecreate_1msg
+ (JNIEnv *, jclass, jlong, jint, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ecreate_stack
+ * Signature: ()J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Ecreate_1stack
+ (JNIEnv *, jclass);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eget_current_stack
+ * Signature: ()J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Eget_1current_1stack
+ (JNIEnv *, jclass);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eclose_stack
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Eclose_1stack
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eprint2
+ * Signature: (JLjava/lang/Object;)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Eprint2
+ (JNIEnv *, jclass, jlong, jobject);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eget_class_name
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_hdf_hdf5lib_H5_H5Eget_1class_1name
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eset_current_stack
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Eset_1current_1stack
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Epop
+ * Signature: (JJ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Epop
+ (JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Epush2
+ * Signature: (JLjava/lang/String;Ljava/lang/String;IJJJLjava/lang/String;)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Epush2
+ (JNIEnv *, jclass, jlong, jstring, jstring, jint, jlong, jlong, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eclear2
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Eclear2
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eget_msg
+ * Signature: (J[I)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_hdf_hdf5lib_H5_H5Eget_1msg
+ (JNIEnv *, jclass, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Eget_num
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Eget_1num
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ewalk2
+ * Signature: (JJLjava/lang/Object;Ljava/lang/Object;)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Ewalk2
+ (JNIEnv *, jclass, jlong, jlong, jobject, jobject);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5E */
diff --git a/java/src/jni/h5fImp.c b/java/src/jni/h5fImp.c
new file mode 100644
index 0000000..b484fb2
--- /dev/null
+++ b/java/src/jni/h5fImp.c
@@ -0,0 +1,516 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ * http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include <jni.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5fImp.h"
+#include "h5util.h"
+
+extern JavaVM *jvm;
+extern jobject visit_callback;
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fopen
+ * Signature: (Ljava/lang/String;IJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Fopen(JNIEnv *env, jclass clss, jstring name, jint flags, jlong access_id)
+{
+ hid_t status = -1;
+ const char *fileName;
+
+ PIN_JAVA_STRING(name, fileName, -1);
+
+ status = H5Fopen(fileName, (unsigned)flags, (hid_t)access_id );
+
+ UNPIN_JAVA_STRING(name, fileName);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jlong)status;
+} /* end Java_hdf_hdf5lib_H5__1H5Fopen */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fcreate
+ * Signature: (Ljava/lang/String;IJJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Fcreate(JNIEnv *env, jclass clss, jstring name, jint flags, jlong create_id, jlong access_id)
+{
+ hid_t status = -1;
+ const char *fileName;
+
+ PIN_JAVA_STRING(name, fileName, -1);
+
+    status = H5Fcreate(fileName, (unsigned)flags, (hid_t)create_id, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, fileName);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jlong)status;
+} /* end Java_hdf_hdf5lib_H5__1H5Fcreate */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fflush
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Fflush(JNIEnv *env, jclass clss, jlong object_id, jint scope)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Fflush((hid_t)object_id, (H5F_scope_t)scope );
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Fflush */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_name
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_hdf_hdf5lib_H5_H5Fget_1name(JNIEnv *env, jclass cls, jlong file_id)
+{
+ char *namePtr;
+ jstring str = NULL;
+ ssize_t buf_size;
+
+ /* get the length of the name */
+ buf_size = H5Fget_name((hid_t)file_id, NULL, 0);
+
+ if (buf_size <= 0) {
+ h5badArgument(env, "H5Fget_name: buf_size <= 0");
+ } /* end if */
+ else {
+ buf_size++; /* add extra space for the null terminator */
+ namePtr = (char*)HDmalloc(sizeof(char) * (size_t)buf_size);
+ if (namePtr == NULL) {
+ h5outOfMemory(env, "H5Fget_name: malloc failed");
+ } /* end if */
+ else {
+ buf_size = H5Fget_name((hid_t)file_id, namePtr, (size_t)buf_size);
+ if (buf_size < 0) {
+ HDfree(namePtr);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR namePtr);
+ HDfree(namePtr);
+ }/* end else */
+ }/* end else */
+ }/* end else */
+
+ return str;
+} /* end Java_hdf_hdf5lib_H5_H5Fget_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fis_hdf5
+ * Signature: (Ljava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Fis_1hdf5(JNIEnv *env, jclass clss, jstring name)
+{
+ htri_t bval = JNI_FALSE;
+ const char *fileName;
+
+ PIN_JAVA_STRING(name, fileName, JNI_FALSE);
+
+ bval = H5Fis_hdf5(fileName);
+
+ UNPIN_JAVA_STRING(name, fileName);
+
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Fis_1hdf5 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_create_plist
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Fget_1create_1plist(JNIEnv *env, jclass clss, jlong file_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Fget_create_plist((hid_t)file_id );
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Fget_1create_1plist */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_access_plist
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Fget_1access_1plist(JNIEnv *env, jclass clss, jlong file_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Fget_access_plist((hid_t)file_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Fget_1access_1plist */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_intent
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Fget_1intent(JNIEnv *env, jclass cls, jlong file_id)
+{
+ unsigned intent = 0;
+
+ if (H5Fget_intent((hid_t)file_id, &intent) < 0)
+ h5libraryError(env);
+
+ return (jint)intent;
+} /* end Java_hdf_hdf5lib_H5_H5Fget_1intent */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5__1H5Fclose(JNIEnv *env, jclass clss, jlong file_id)
+{
+ herr_t status = -1;
+
+ if (file_id > 0)
+ status = H5Fclose((hid_t)file_id );
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5__1H5Fclose */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fmount
+ * Signature: (JLjava/lang/String;JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Fmount(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong child_id, jlong plist_id)
+{
+ herr_t status = -1;
+ const char *fileName;
+
+ PIN_JAVA_STRING(name, fileName, -1);
+
+ status = H5Fmount((hid_t)loc_id, fileName, (hid_t)child_id, (hid_t)plist_id);
+
+ UNPIN_JAVA_STRING(name, fileName);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Fmount */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Funmount
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Funmount(JNIEnv *env, jclass clss, jlong loc_id, jstring name)
+{
+ herr_t status = -1;
+ const char *fileName;
+
+ PIN_JAVA_STRING(name, fileName, -1);
+
+ status = H5Funmount((hid_t)loc_id, fileName);
+
+ UNPIN_JAVA_STRING(name, fileName);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Funmount */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_freespace
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Fget_1freespace(JNIEnv *env, jclass cls, jlong file_id)
+{
+ hssize_t ret_val = H5Fget_freespace((hid_t)file_id);
+
+ if (ret_val < 0)
+ h5libraryError(env);
+
+ return (jlong)ret_val;
+} /* end Java_hdf_hdf5lib_H5_H5Fget_1freespace */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Freopen
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Freopen(JNIEnv *env, jclass clss, jlong file_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Freopen((hid_t)file_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Freopen */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_obj_ids
+ * Signature: (JIJ[J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Fget_1obj_1ids(JNIEnv *env, jclass cls, jlong file_id, jint types, jlong maxObjs,
+ jlongArray obj_id_list)
+{
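+    /* Open-object IDs are gathered into a temporary hid_t buffer and then copied
+     * element by element into the pinned jlong array, since hid_t and jlong are
+     * distinct types whose widths may differ. */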
+ ssize_t ret_val = -1;
+ jlong *obj_id_listP;
+ jboolean isCopy;
+ hid_t *id_list;
+ size_t rank;
+ size_t i;
+
+ if (obj_id_list == NULL) {
+ h5nullArgument(env, "H5Fget_obj_ids: obj_id_list is NULL");
+ } /* end if */
+ else {
+ obj_id_listP = ENVPTR->GetLongArrayElements(ENVPAR obj_id_list, &isCopy);
+ if (obj_id_listP == NULL) {
+ h5JNIFatalError(env, "H5Fget_obj_ids: obj_id_list not pinned");
+ } /* end if */
+ else {
+ rank = (size_t)ENVPTR->GetArrayLength(ENVPAR obj_id_list);
+
+ id_list = (hid_t *)HDmalloc(rank * sizeof(hid_t));
+ if (id_list == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR obj_id_list, obj_id_listP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Fget_obj_ids: obj_id_list not converted to hid_t");
+ } /* end if */
+ else {
+ ret_val = H5Fget_obj_ids((hid_t)file_id, (unsigned int)types, (size_t)maxObjs, id_list);
+
+ if (ret_val < 0) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR obj_id_list, obj_id_listP, JNI_ABORT);
+ HDfree(id_list);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ for (i = 0; i < rank; i++) {
+ obj_id_listP[i] = (jlong)id_list[i];
+ } /* end for */
+ HDfree(id_list);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR obj_id_list, obj_id_listP, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jlong)ret_val;
+} /* end Java_hdf_hdf5lib_H5_H5Fget_1obj_1ids */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_obj_count
+ * Signature: (JI)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Fget_1obj_1count(JNIEnv *env, jclass cls, jlong file_id, jint types)
+{
+ ssize_t ret_val = H5Fget_obj_count((hid_t)file_id, (unsigned int)types);
+
+ if (ret_val < 0)
+ h5libraryError(env);
+
+ return (jlong)ret_val;
+} /* end Java_hdf_hdf5lib_H5_H5Fget_1obj_1count */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_filesize
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Fget_1filesize(JNIEnv *env, jclass clss, jlong file_id)
+{
+ hsize_t size = 0;
+
+ if (H5Fget_filesize ((hid_t)file_id, &size) < 0)
+ h5libraryError(env);
+
+ return (jlong) size;
+} /* end Java_hdf_hdf5lib_H5_H5Fget_1filesize */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_mdc_hit_rate
+ * Signature: (J)D
+ */
+JNIEXPORT jdouble JNICALL
+Java_hdf_hdf5lib_H5_H5Fget_1mdc_1hit_1rate(JNIEnv *env, jclass cls, jlong file_id)
+{
+ double rate = 0.0;
+
+ if (H5Fget_mdc_hit_rate((hid_t)file_id, &rate) < 0)
+ h5libraryError(env);
+
+ return (jdouble)rate;
+} /* end Java_hdf_hdf5lib_H5_H5Fget_1mdc_1hit_1rate */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_mdc_size
+ * Signature: (J[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Fget_1mdc_1size(JNIEnv *env, jclass cls, jlong file_id, jlongArray metadata_cache)
+{
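+    /* Fills metadata_cache[0..2] with max_size, min_clean_size and cur_size, and
+     * returns the current number of cache entries. */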
+ jint size = 0;
+ jlong *metadata_cache_ptr;
+ size_t max_size = 0, min_clean_size = 0, cur_size = 0;
+ int cur_num_entries = -1;
+ jboolean isCopy;
+
+ if (metadata_cache == NULL) {
+ h5nullArgument(env, "H5Fget_mdc_size: metadata_cache is NULL");
+ } /* end if */
+ else {
+ size = (int)ENVPTR->GetArrayLength(ENVPAR metadata_cache);
+ if (size < 3) {
+ h5badArgument(env, "H5Fget_mdc_size: length of metadata_cache < 3.");
+ } /* end if */
+ else {
+ if (H5Fget_mdc_size((hid_t)file_id, &max_size, &min_clean_size, &cur_size, &cur_num_entries) < 0) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+                metadata_cache_ptr = ENVPTR->GetLongArrayElements(ENVPAR metadata_cache, &isCopy);
+                if (metadata_cache_ptr == NULL) {
+                    h5JNIFatalError(env, "H5Fget_mdc_size: metadata_cache not pinned");
+                } /* end if */
+                else {
+                    metadata_cache_ptr[0] = (jlong)max_size;
+                    metadata_cache_ptr[1] = (jlong)min_clean_size;
+                    metadata_cache_ptr[2] = (jlong)cur_size;
+                    ENVPTR->ReleaseLongArrayElements(ENVPAR metadata_cache, metadata_cache_ptr, 0);
+                } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)cur_num_entries;
+} /* end Java_hdf_hdf5lib_H5_H5Fget_1mdc_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_info
+ * Signature: (J)Lhdf/hdf5lib/structs/H5F_info2_t;
+ */
+JNIEXPORT jobject JNICALL
+Java_hdf_hdf5lib_H5_H5Fget_1info(JNIEnv *env, jclass clss, jlong obj_id)
+{
+ H5F_info2_t finfo;
+ jvalue args[9];
+ jobject ihinfobuf;
+ jobject ret_obj = NULL;
+
+ if (H5Fget_info2((hid_t)obj_id, &finfo) < 0) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+ args[0].j = (jlong)finfo.sohm.msgs_info.index_size;
+ args[1].j = (jlong)finfo.sohm.msgs_info.heap_size;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5_ih_info_t", "(JJ)V", args);
+ ihinfobuf = ret_obj;
+
+ args[0].i = (jint)finfo.super.version;
+ args[1].j = (jlong)finfo.super.super_size;
+ args[2].j = (jlong)finfo.super.super_ext_size;
+ args[3].i = (jint)finfo.free.version;
+ args[4].j = (jlong)finfo.free.meta_size;
+ args[5].j = (jlong)finfo.free.tot_space;
+        args[6].i = (jint)finfo.sohm.version;
+ args[7].j = (jlong)finfo.sohm.hdr_size;
+ args[8].l = ihinfobuf;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5F_info2_t", "(IJJIJJIJLhdf/hdf5lib/structs/H5_ih_info_t;)V", args);
+ } /* end else */
+ return ret_obj;
+} /* end Java_hdf_hdf5lib_H5_H5Fget_1info */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Freset_mdc_hit_rate_stats
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Freset_1mdc_1hit_1rate_1stats(JNIEnv *env, jclass cls, jlong file_id)
+{
+ if (H5Freset_mdc_hit_rate_stats((hid_t)file_id) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Freset_1mdc_1hit_1rate_1stats */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fclear_elink_file_cache
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Fclear_1elink_1file_1cache(JNIEnv *env, jclass cls, jlong file_id)
+{
+ if (H5Fclear_elink_file_cache((hid_t)file_id) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Fclear_1elink_1file_1cache */
+
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
diff --git a/java/src/jni/h5fImp.h b/java/src/jni/h5fImp.h
new file mode 100644
index 0000000..5a72fab
--- /dev/null
+++ b/java/src/jni/h5fImp.h
@@ -0,0 +1,198 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5F */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5F
+#define _Included_hdf_hdf5lib_H5_H5F
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fopen
+ * Signature: (Ljava/lang/String;IJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Fopen
+ (JNIEnv*, jclass, jstring, jint, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fcreate
+ * Signature: (Ljava/lang/String;IJJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Fcreate
+ (JNIEnv*, jclass, jstring, jint, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fflush
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Fflush
+ (JNIEnv*, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_name
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_hdf_hdf5lib_H5_H5Fget_1name
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fis_hdf5
+ * Signature: (Ljava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Fis_1hdf5
+ (JNIEnv*, jclass, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_create_plist
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Fget_1create_1plist
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_access_plist
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Fget_1access_1plist
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_intent
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Fget_1intent
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5__1H5Fclose
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fmount
+ * Signature: (JLjava/lang/String;JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Fmount
+ (JNIEnv*, jclass, jlong, jstring, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Funmount
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Funmount
+ (JNIEnv*, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_freespace
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Fget_1freespace
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Freopen
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Freopen
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_obj_ids
+ * Signature: (JIJ[J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Fget_1obj_1ids
+ (JNIEnv*, jclass, jlong, jint, jlong, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_obj_count
+ * Signature: (JI)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Fget_1obj_1count
+ (JNIEnv*, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_filesize
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Fget_1filesize
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_mdc_hit_rate
+ * Signature: (J)D
+ */
+JNIEXPORT jdouble JNICALL Java_hdf_hdf5lib_H5_H5Fget_1mdc_1hit_1rate
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_mdc_size
+ * Signature: (J[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Fget_1mdc_1size
+ (JNIEnv*, jclass, jlong, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Freset_mdc_hit_rate_stats
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Freset_1mdc_1hit_1rate_1stats
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fget_info
+ * Signature: (J)Lhdf/hdf5lib/structs/H5F_info2_t;
+ */
+JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Fget_1info
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Fclear_elink_file_cache
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Fclear_1elink_1file_1cache
+ (JNIEnv *, jclass, jlong);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5F */
diff --git a/java/src/jni/h5gImp.c b/java/src/jni/h5gImp.c
new file mode 100644
index 0000000..632942a
--- /dev/null
+++ b/java/src/jni/h5gImp.c
@@ -0,0 +1,257 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ * http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include <jni.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "hdf5.h"
+#include "h5util.h"
+#include "h5gImp.h"
+
+extern JavaVM *jvm;
+extern jobject visit_callback;
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Gclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5__1H5Gclose(JNIEnv *env, jclass clss, jlong group_id)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Gclose((hid_t)group_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Gclose */
+
+/*
+ * Create a Java object of hdf.hdf5lib.structs.H5G_info_t
+ * public class H5G_info_t {
+ *     public int storage_type; // Type of storage for links in group
+ *     public long nlinks; // Number of links in group
+ *     public long max_corder; // Current max. creation order value for group
+ *     public boolean mounted; // Whether group has a file mounted on it
+ * }
+ *
+ */
+jobject
+create_H5G_info_t(JNIEnv *env, H5G_info_t group_info)
+{
+ jclass cls;
+ jboolean jmounted;
+ jint storage_type;
+ jobject obj;
+ jfieldID fid_storage_type, fid_nlinks, fid_max_corder, fid_mounted;
+
+ cls = ENVPTR->FindClass(ENVPAR "hdf/hdf5lib/structs/H5G_info_t");
+ if (cls == NULL)
+ return NULL;
+
+ obj = ENVPTR->AllocObject(ENVPAR cls);
+ if (obj == NULL)
+ return NULL;
+
+ fid_storage_type = ENVPTR->GetFieldID(ENVPAR cls, "storage_type", "I");
+ fid_nlinks = ENVPTR->GetFieldID(ENVPAR cls, "nlinks", "J");
+ fid_max_corder = ENVPTR->GetFieldID(ENVPAR cls, "max_corder", "J");
+ fid_mounted = ENVPTR->GetFieldID(ENVPAR cls, "mounted", "Z");
+
+ if (fid_storage_type==NULL || fid_nlinks==NULL || fid_max_corder==NULL || fid_mounted == NULL)
+ return NULL;
+
+ jmounted = (group_info.mounted==0) ? JNI_FALSE : JNI_TRUE;
+ storage_type = (jint)group_info.storage_type;
+
+ ENVPTR->SetIntField(ENVPAR obj, fid_storage_type, (jint)storage_type);
+ ENVPTR->SetLongField(ENVPAR obj, fid_nlinks, (jlong)group_info.nlinks);
+ ENVPTR->SetLongField(ENVPAR obj, fid_max_corder, (jlong)group_info.max_corder);
+ ENVPTR->SetBooleanField(ENVPAR obj, fid_mounted, jmounted);
+
+ return obj;
+} /* end create_H5G_info_t */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Gcreate2
+ * Signature: (JLjava/lang/String;JJJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Gcreate2(JNIEnv *env, jclass clss, jlong loc_id, jstring name,
+ jlong link_plist_id, jlong create_plist_id, jlong access_plist_id)
+{
+ hid_t group_id = -1;
+ const char *gName;
+
+ PIN_JAVA_STRING(name, gName, -1);
+
+ group_id = H5Gcreate2((hid_t)loc_id, gName, (hid_t)link_plist_id, (hid_t)create_plist_id, (hid_t)access_plist_id );
+
+ UNPIN_JAVA_STRING(name, gName);
+ if (group_id < 0)
+ h5libraryError(env);
+
+ return (jlong)group_id;
+} /* end Java_hdf_hdf5lib_H5__1H5Gcreate2 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Gcreate_anon
+ * Signature: (JJJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Gcreate_1anon(JNIEnv *env, jclass cls, jlong loc_id, jlong gcpl_id, jlong gapl_id)
+{
+ hid_t group_id = -1;
+
+ group_id = H5Gcreate_anon((hid_t)loc_id, (hid_t)gcpl_id, (hid_t)gapl_id);
+ if (group_id < 0)
+ h5libraryError(env);
+
+ return (jlong)group_id;
+} /* end Java_hdf_hdf5lib_H5__1H5Gcreate_1anon */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Gopen2
+ * Signature: (JLjava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Gopen2(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong access_plist_id)
+{
+ hid_t group_id = -1;
+ const char *gName;
+
+ PIN_JAVA_STRING(name, gName, -1);
+
+ group_id = H5Gopen2((hid_t)loc_id, gName, (hid_t)access_plist_id );
+
+ UNPIN_JAVA_STRING(name, gName);
+
+ if (group_id < 0)
+ h5libraryError(env);
+
+ return (jlong)group_id;
+} /* end Java_hdf_hdf5lib_H5__1H5Gopen2 */
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Gget_create_plist
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Gget_1create_1plist(JNIEnv *env, jclass cls, jlong loc_id)
+{
+ hid_t plist_id = H5Gget_create_plist((hid_t)loc_id);
+
+ if (plist_id < 0)
+ h5libraryError(env);
+
+ return (jlong)plist_id;
+} /* end Java_hdf_hdf5lib_H5_H5Gget_1create_1plist */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Gget_info
+ * Signature: (J)Lhdf/hdf5lib/structs/H5G_info_t;
+ */
+JNIEXPORT jobject JNICALL
+Java_hdf_hdf5lib_H5_H5Gget_1info(JNIEnv *env, jclass cls, jlong loc_id)
+{
+ H5G_info_t group_info;
+
+ if (H5Gget_info((hid_t)loc_id, &group_info) < 0) {
+ h5libraryError(env);
+ return NULL;
+ } /* end if */
+
+ return create_H5G_info_t(env, group_info);
+} /* end Java_hdf_hdf5lib_H5_H5Gget_1info */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Gget_info_by_name
+ * Signature: (JLjava/lang/String;J)Lhdf/hdf5lib/structs/H5G_info_t;
+ */
+JNIEXPORT jobject JNICALL
+Java_hdf_hdf5lib_H5_H5Gget_1info_1by_1name(JNIEnv *env, jclass cls, jlong loc_id, jstring name, jlong lapl_id)
+{
+ herr_t ret_val = -1;
+ const char *gName;
+ H5G_info_t group_info;
+
+ PIN_JAVA_STRING(name, gName, NULL);
+
+ ret_val = H5Gget_info_by_name((hid_t)loc_id, gName, &group_info, (hid_t)lapl_id);
+
+ UNPIN_JAVA_STRING(name, gName);
+
+ if (ret_val < 0) {
+ h5libraryError(env);
+ return NULL;
+ } /* end if */
+
+ return create_H5G_info_t(env, group_info);
+} /* end Java_hdf_hdf5lib_H5_H5Gget_1info_1by_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Gget_info_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)Lhdf/hdf5lib/structs/H5G_info_t;
+ */
+JNIEXPORT jobject JNICALL
+Java_hdf_hdf5lib_H5_H5Gget_1info_1by_1idx(JNIEnv *env, jclass cls, jlong loc_id, jstring name, jint index_type,
+ jint order, jlong n, jlong lapl_id)
+{
+ herr_t ret_val = -1;
+ const char *gName;
+ H5G_info_t group_info;
+ H5_index_t cindex_type = (H5_index_t)index_type;
+ H5_iter_order_t corder = (H5_iter_order_t)order;
+
+ PIN_JAVA_STRING(name, gName, NULL);
+
+ ret_val = H5Gget_info_by_idx((hid_t)loc_id, gName, cindex_type,
+ corder, (hsize_t)n, &group_info, (hid_t)lapl_id);
+
+ UNPIN_JAVA_STRING(name, gName);
+
+ if (ret_val < 0) {
+ h5libraryError(env);
+ return NULL;
+ } /* end if */
+
+ return create_H5G_info_t(env, group_info);
+} /* end Java_hdf_hdf5lib_H5_H5Gget_1info_1by_1idx */
+
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
diff --git a/java/src/jni/h5gImp.h b/java/src/jni/h5gImp.h
new file mode 100644
index 0000000..2b9bc57
--- /dev/null
+++ b/java/src/jni/h5gImp.h
@@ -0,0 +1,96 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5G */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5G
+#define _Included_hdf_hdf5lib_H5_H5G
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Gclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5__1H5Gclose
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Gcreate2
+ * Signature: (JLjava/lang/String;JJJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Gcreate2
+ (JNIEnv*, jclass, jlong, jstring, jlong, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Gcreate_anon
+ * Signature: (JJJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Gcreate_1anon
+ (JNIEnv*, jclass, jlong, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Gopen2
+ * Signature: (JLjava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Gopen2
+ (JNIEnv*, jclass, jlong, jstring, jlong);
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Gget_create_plist
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Gget_1create_1plist
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Gget_info
+ * Signature: (J)Lhdf/hdf5lib/structs/H5G_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Gget_1info
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Gget_info_by_name
+ * Signature: (JLjava/lang/String;J)Lhdf/hdf5lib/structs/H5G_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Gget_1info_1by_1name
+ (JNIEnv*, jclass, jlong, jstring, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Gget_info_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)Lhdf/hdf5lib/structs/H5G_info_t;
+ */
+JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Gget_1info_1by_1idx
+ (JNIEnv*, jclass, jlong, jstring, jint, jint, jlong, jlong);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5G */
diff --git a/java/src/jni/h5iImp.c b/java/src/jni/h5iImp.c
new file mode 100644
index 0000000..e7d663f
--- /dev/null
+++ b/java/src/jni/h5iImp.c
@@ -0,0 +1,289 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ * http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include "hdf5.h"
+#include <stdlib.h>
+#include <jni.h>
+#include "h5jni.h"
+#include "h5iImp.h"
+
+extern JavaVM *jvm;
+extern jobject visit_callback;
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iget_type
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Iget_1type(JNIEnv *env, jclass clss, jlong obj_id)
+{
+ H5I_type_t retVal = H5I_BADID;
+
+ retVal = H5Iget_type((hid_t)obj_id);
+ if (retVal == H5I_BADID)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Iget_1type */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iget_name
+ * Signature: (JLjava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Iget_1name(JNIEnv *env, jclass clss, jlong obj_id, jobjectArray name, jlong buf_size)
+{
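+    /* Looks up the name of obj_id, stores it as a Java String in name[0], and
+     * returns the name length reported by the library. */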
+ char *aName;
+ jstring str;
+ hssize_t size = -1;
+ long bs;
+
+ bs = (long)buf_size;
+ if (bs <= 0) {
+ h5badArgument(env, "H5Iget_name: buf_size <= 0");
+ } /* end if */
+ else {
+ aName = (char*)HDmalloc(sizeof(char) * (size_t)bs);
+ if (aName == NULL) {
+ h5outOfMemory(env, "H5Iget_name: malloc failed");
+ } /* end if */
+ else {
+ size = H5Iget_name((hid_t)obj_id, aName, (size_t)buf_size);
+ if (size < 0) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR aName);
+ ENVPTR->SetObjectArrayElement(ENVPAR name, 0, str);
+ }
+ HDfree(aName);
+ }
+ }
+ return (jlong)size;
+} /* end Java_hdf_hdf5lib_H5_H5Iget_1name */
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iget_ref
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Iget_1ref(JNIEnv *env, jclass clss, jlong obj_id)
+{
+ int retVal = -1;
+
+ retVal = H5Iget_ref((hid_t)obj_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Iget_1ref */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iinc_ref
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Iinc_1ref(JNIEnv *env, jclass clss, jlong obj_id)
+{
+ int retVal = -1;
+
+ retVal = H5Iinc_ref((hid_t)obj_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Iinc_1ref */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Idec_1ref
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Idec_1ref(JNIEnv *env, jclass clss, jlong obj_id)
+{
+ int retVal = -1;
+
+ retVal = H5Idec_ref((hid_t)obj_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Idec_1ref */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iget_file_id
+ * Signature: (J)J
+ */
+
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Iget_1file_1id(JNIEnv *env, jclass clss, jlong obj_id)
+{
+ hid_t file_id = -1;
+
+ file_id = H5Iget_file_id((hid_t)obj_id);
+ if (file_id < 0)
+ h5libraryError(env);
+
+ return (jlong) file_id;
+} /* end Java_hdf_hdf5lib_H5_H5Iget_1file_1id */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iget_type_ref
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Iget_1type_1ref(JNIEnv *env, jclass clss, jint type)
+{
+ int retVal = -1;
+
+ retVal = H5Iget_type_ref((H5I_type_t)type);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Iget_1type_1ref */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Idec_type_ref
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Idec_1type_1ref(JNIEnv *env, jclass clss, jint type)
+{
+ int retVal = -1;
+
+ retVal = H5Idec_type_ref((H5I_type_t)type);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Idec_1type_1ref */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iinc_type_ref
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Iinc_1type_1ref(JNIEnv *env, jclass clss, jint type)
+{
+ int retVal = -1;
+
+ retVal = H5Iinc_type_ref((H5I_type_t)type);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Iinc_1type_1ref */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Inmembers
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Inmembers(JNIEnv *env, jclass clss, jint type)
+{
+    hsize_t num_members = 0;
+
+ if (H5Inmembers((H5I_type_t)type, &num_members) < 0)
+ h5libraryError(env);
+
+ return (jint)num_members;
+} /* end Java_hdf_hdf5lib_H5_H5Inmembers */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iis_valid
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Iis_1valid(JNIEnv *env, jclass clss, jlong obj_id)
+{
+ htri_t bval = JNI_FALSE;
+
+ bval = H5Iis_valid((hid_t)obj_id);
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Iis_1valid */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Itype_exists
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Itype_1exists(JNIEnv *env, jclass clss, jint type)
+{
+ htri_t bval = JNI_FALSE;
+
+ bval = H5Itype_exists((H5I_type_t)type);
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Itype_1exists */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iclear_type
+ * Signature: (IZ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Iclear_1type(JNIEnv *env, jclass clss, jint type, jboolean force)
+{
+ if (H5Iclear_type((H5I_type_t)type, (hbool_t)force) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Iclear_1type */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Idestroy_type
+ * Signature: (I)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Idestroy_1type(JNIEnv *env, jclass clss, jint type)
+{
+ if (H5Idestroy_type((H5I_type_t)type) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Idestroy_1type */
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
diff --git a/java/src/jni/h5iImp.h b/java/src/jni/h5iImp.h
new file mode 100644
index 0000000..fe36b5b
--- /dev/null
+++ b/java/src/jni/h5iImp.h
@@ -0,0 +1,143 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5I */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5I
+#define _Included_hdf_hdf5lib_H5_H5I
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iget_type
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Iget_1type
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iget_name
+ * Signature: (JLjava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Iget_1name
+ (JNIEnv*, jclass, jlong, jobjectArray, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iget_ref
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Iget_1ref
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iinc_ref
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Iinc_1ref
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Idec_1ref
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Idec_1ref
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iget_file_id
+ * Signature: (J)J
+ */
+
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Iget_1file_1id
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iget_type_ref
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Iget_1type_1ref
+ (JNIEnv*, jclass, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Idec_type_ref
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Idec_1type_1ref
+ (JNIEnv*, jclass, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iinc_type_ref
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Iinc_1type_1ref
+ (JNIEnv*, jclass, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Inmembers
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Inmembers
+ (JNIEnv*, jclass, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iis_valid
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Iis_1valid
+ (JNIEnv*, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Itype_exists
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Itype_1exists
+ (JNIEnv*, jclass, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Iclear_type
+ * Signature: (IZ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Iclear_1type
+ (JNIEnv*, jclass, jint, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Idestroy_type
+ * Signature: (I)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Idestroy_1type
+ (JNIEnv*, jclass, jint);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5I */
diff --git a/java/src/jni/h5jni.h b/java/src/jni/h5jni.h
new file mode 100644
index 0000000..da49eeb
--- /dev/null
+++ b/java/src/jni/h5jni.h
@@ -0,0 +1,249 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ * http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#include <jni.h>
+#include "H5version.h"
+#include <string.h>
+#include "H5private.h"
+
+#ifndef _Included_h5jni
+#define _Included_h5jni
+
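+/* JNI calling conventions differ between C and C++: in C, JNIEnv is a pointer to
+ * a function table and a call is written (*env)->Func(env, args); in C++, env
+ * behaves like an object and the same call is written env->Func(args).  The
+ * ENVPTR/ENVPAR macros (and the CBENV and JVM variants for callback environments
+ * and the JavaVM) hide that difference. */
+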
+#ifdef __cplusplus
+ #define ENVPTR (env)
+ #define ENVPAR
+ #define ENVONLY
+ #define CBENVPTR (cbenv)
+ #define CBENVPAR
+ #define JVMPTR (jvm)
+ #define JVMPAR
+ #define JVMPAR2
+#else /* __cplusplus */
+ #define ENVPTR (*env)
+ #define ENVPAR env,
+ #define ENVONLY env
+ #define CBENVPTR (*cbenv)
+ #define CBENVPAR cbenv,
+ #define JVMPTR (*jvm)
+ #define JVMPAR jvm
+ #define JVMPAR2 jvm,
+#endif /* __cplusplus */
+
+/* Macros for class access */
+/* Calling code must define ret_obj as jobject */
+#define CALL_CONSTRUCTOR(classname,classsig,args) { \
+ jclass cls; \
+ jmethodID constructor; \
+ cls = ENVPTR->FindClass(ENVPAR (classname)); \
+ if (cls == 0) { \
+ h5JNIFatalError(env, "JNI error: GetObjectClass\n"); \
+ ret_obj = NULL; \
+ } \
+ constructor = ENVPTR->GetMethodID(ENVPAR cls, "<init>", (classsig)); \
+ if (constructor == 0) { \
+ h5JNIFatalError(env, "JNI error: GetMethodID failed\n"); \
+ ret_obj = NULL; \
+ } \
+ ret_obj = ENVPTR->NewObjectA(ENVPAR cls, constructor, (args)); \
+}
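+
+/* Usage sketch (the pattern used by Java_hdf_hdf5lib_H5_H5Fget_1info in h5fImp.c):
+ * the caller declares
+ *     jvalue  args[2];
+ *     jobject ret_obj = NULL;
+ * fills in args[0].j and args[1].j, then invokes
+ *     CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5_ih_info_t", "(JJ)V", args);
+ * after which ret_obj holds the newly constructed object, or NULL on failure. */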
+
+
+/* Macros for string access */
+#define PIN_JAVA_STRING(javastr,localstr,retdefault) { \
+ jboolean isCopy; \
+ if ((javastr) == NULL) { \
+ h5nullArgument(env, "java string is NULL"); \
+ return (retdefault); \
+ } \
+ (localstr) = ENVPTR->GetStringUTFChars(ENVPAR (javastr), &isCopy); \
+ if ((localstr) == NULL) { \
+ h5JNIFatalError(env, "local c string is not pinned"); \
+ return (retdefault); \
+ } \
+}
+
+#define PIN_JAVA_STRING0(javastr,localstr) { \
+ jboolean isCopy; \
+ if ((javastr) == NULL) { \
+ h5nullArgument(env, "java string is NULL"); \
+ return; \
+ } \
+ (localstr) = ENVPTR->GetStringUTFChars(ENVPAR (javastr), &isCopy); \
+ if ((localstr) == NULL) { \
+ h5JNIFatalError(env, "local c string is not pinned"); \
+ return; \
+ } \
+}
+
+#define UNPIN_JAVA_STRING(javastr,localstr) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+}
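+
+/* Usage sketch (the pattern used by Java_hdf_hdf5lib_H5__1H5Fopen in h5fImp.c):
+ *     const char *fileName;
+ *     PIN_JAVA_STRING(name, fileName, -1);   (returns -1 if name is NULL or cannot be pinned)
+ *     status = H5Fopen(fileName, (unsigned)flags, (hid_t)access_id);
+ *     UNPIN_JAVA_STRING(name, fileName);
+ *     if (status < 0)
+ *         h5libraryError(env);
+ * The TWO and THREE variants pin two or three strings and release any string that
+ * was already pinned before returning on failure. */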
+
+#define PIN_JAVA_STRING_TWO(javastr,localstr,java2str,local2str,retdefault) { \
+ jboolean isCopy; \
+ if ((javastr) == NULL) { \
+ h5nullArgument(env, "java string is NULL"); \
+ return (retdefault); \
+ } \
+ if ((java2str) == NULL) { \
+ h5nullArgument(env, "second java string is NULL"); \
+ return (retdefault); \
+ } \
+ (localstr) = ENVPTR->GetStringUTFChars(ENVPAR (javastr), &isCopy); \
+ if ((localstr) == NULL) { \
+ h5JNIFatalError(env, "local c string is not pinned"); \
+ return (retdefault); \
+ } \
+ (local2str) = ENVPTR->GetStringUTFChars(ENVPAR (java2str), &isCopy); \
+ if ((local2str) == NULL) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+ h5JNIFatalError(env, "second local c string is not pinned"); \
+ return (retdefault); \
+ } \
+}
+
+#define PIN_JAVA_STRING_TWO0(javastr,localstr,java2str,local2str) { \
+ jboolean isCopy; \
+ if ((javastr) == NULL) { \
+ h5nullArgument(env, "java string is NULL"); \
+ return; \
+ } \
+ if ((java2str) == NULL) { \
+ h5nullArgument(env, "second java string is NULL"); \
+ return; \
+ } \
+ (localstr) = ENVPTR->GetStringUTFChars(ENVPAR (javastr), &isCopy); \
+ if ((localstr) == NULL) { \
+ h5JNIFatalError(env, "local c string is not pinned"); \
+ return; \
+ } \
+ (local2str) = ENVPTR->GetStringUTFChars(ENVPAR (java2str), &isCopy); \
+ if ((local2str) == NULL) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+ h5JNIFatalError(env, "second local c string is not pinned"); \
+ return; \
+ } \
+}
+
+#define UNPIN_JAVA_STRING_TWO(javastr,localstr,java2str,local2str) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (java2str), (local2str)); \
+}
+
+#define PIN_JAVA_STRING_THREE(javastr,localstr,java2str,local2str,java3str,local3str,retdefault) { \
+ jboolean isCopy; \
+ if ((javastr) == NULL) { \
+ h5nullArgument(env, "java string is NULL"); \
+ return (retdefault); \
+ } \
+ if ((java2str) == NULL) { \
+ h5nullArgument(env, "second java string is NULL"); \
+ return (retdefault); \
+ } \
+ if ((java3str) == NULL) { \
+ h5nullArgument(env, "third java string is NULL"); \
+ return (retdefault); \
+ } \
+ (localstr) = ENVPTR->GetStringUTFChars(ENVPAR (javastr), &isCopy); \
+ if ((localstr) == NULL) { \
+ h5JNIFatalError(env, "local c string is not pinned"); \
+ return (retdefault); \
+ } \
+ (local2str) = ENVPTR->GetStringUTFChars(ENVPAR (java2str), &isCopy); \
+ if ((local2str) == NULL) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+ h5JNIFatalError(env, "second local c string is not pinned"); \
+ return (retdefault); \
+ } \
+ (local3str) = ENVPTR->GetStringUTFChars(ENVPAR (java3str), &isCopy); \
+ if ((local3str) == NULL) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (java2str), (local2str)); \
+ h5JNIFatalError(env, "third local c string is not pinned"); \
+ return (retdefault); \
+ } \
+}
+
+#define PIN_JAVA_STRING_THREE0(javastr,localstr,java2str,local2str,java3str,local3str) { \
+ jboolean isCopy; \
+ if ((javastr) == NULL) { \
+ h5nullArgument(env, "java string is NULL"); \
+ return; \
+ } \
+ if ((java2str) == NULL) { \
+ h5nullArgument(env, "second java string is NULL"); \
+ return; \
+ } \
+ if ((java3str) == NULL) { \
+ h5nullArgument(env, "third java string is NULL"); \
+ return; \
+ } \
+ (localstr) = ENVPTR->GetStringUTFChars(ENVPAR (javastr), &isCopy); \
+ if ((localstr) == NULL) { \
+ h5JNIFatalError(env, "local c string is not pinned"); \
+ return; \
+ } \
+ (local2str) = ENVPTR->GetStringUTFChars(ENVPAR (java2str), &isCopy); \
+ if ((local2str) == NULL) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+ h5JNIFatalError(env, "second local c string is not pinned"); \
+ return; \
+ } \
+ (local3str) = ENVPTR->GetStringUTFChars(ENVPAR (java3str), &isCopy); \
+ if ((local3str) == NULL) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (java2str), (local2str)); \
+ h5JNIFatalError(env, "third local c string is not pinned"); \
+ return; \
+ } \
+}
+
+#define UNPIN_JAVA_STRING_THREE(javastr,localstr,java2str,local2str,java3str,local3str) { \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (javastr), (localstr)); \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (java2str), (local2str)); \
+ ENVPTR->ReleaseStringUTFChars(ENVPAR (java3str), (local3str)); \
+}
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+extern jboolean h5JNIFatalError(JNIEnv *, const char *);
+extern jboolean h5nullArgument(JNIEnv *, const char *);
+extern jboolean h5badArgument(JNIEnv *, const char *);
+extern jboolean h5outOfMemory(JNIEnv *, const char *);
+extern jboolean h5libraryError(JNIEnv *);
+extern jboolean h5raiseException(JNIEnv *, const char *, const char *);
+extern jboolean h5unimplemented(JNIEnv *, const char *);
+
+/* implemented in H5.c */
+extern jint get_enum_value(JNIEnv *env, jobject enum_obj);
+extern jobject get_enum_object(JNIEnv *env, const char* enum_class_name,
+ jint enum_val, const char* enum_field_desc);
+
+/* implemented in h5gImp.c */
+extern jobject create_H5G_info_t(JNIEnv *env, H5G_info_t group_info);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_h5jni */
diff --git a/java/src/jni/h5lImp.c b/java/src/jni/h5lImp.c
new file mode 100644
index 0000000..739e43b
--- /dev/null
+++ b/java/src/jni/h5lImp.c
@@ -0,0 +1,736 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ *     http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include <jni.h>
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5jni.h"
+#include "h5lImp.h"
+
+extern JavaVM *jvm;
+extern jobject visit_callback;
+
+/********************/
+/* Local Prototypes */
+/********************/
+
+static herr_t H5L_iterate_cb(hid_t g_id, const char *name, const H5L_info_t *info, void *op_data);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lcopy
+ * Signature: (JLjava/lang/String;JLjava/lang/String;JJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Lcopy(JNIEnv *env, jclass clss, jlong cur_loc_id, jstring cur_name, jlong dst_loc_id,
+ jstring dst_name, jlong create_id, jlong access_id)
+{
+ herr_t status = -1;
+ const char *lCurName;
+ const char *lDstName;
+
+ PIN_JAVA_STRING_TWO0(cur_name, lCurName, dst_name, lDstName);
+
+ status = H5Lcopy((hid_t)cur_loc_id, lCurName, (hid_t)dst_loc_id, lDstName, (hid_t)create_id, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING_TWO(cur_name, lCurName, dst_name, lDstName);
+
+ if (status < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Lcopy */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lcreate_external
+ * Signature: (Ljava/lang/String;Ljava/lang/String;JLjava/lang/String;JJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Lcreate_1external(JNIEnv *env, jclass clss, jstring file_name, jstring cur_name,
+ jlong dst_loc_id, jstring dst_name, jlong create_id, jlong access_id)
+{
+ herr_t status = -1;
+ const char *lFileName;
+ const char *lCurName;
+ const char *lDstName;
+
+ PIN_JAVA_STRING_THREE0(file_name, lFileName, cur_name, lCurName, dst_name, lDstName);
+
+ status = H5Lcreate_external(lFileName, lCurName, (hid_t)dst_loc_id, lDstName, (hid_t)create_id, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING_THREE(file_name, lFileName, cur_name, lCurName, dst_name, lDstName);
+
+ if (status < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Lcreate_1external */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lcreate_hard
+ * Signature: (JLjava/lang/String;JLjava/lang/String;JJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Lcreate_1hard(JNIEnv *env, jclass clss, jlong cur_loc_id, jstring cur_name,
+ jlong dst_loc_id, jstring dst_name, jlong create_id, jlong access_id)
+{
+ herr_t status = -1;
+ const char *lCurName;
+ const char *lDstName;
+
+ PIN_JAVA_STRING_TWO0(cur_name, lCurName, dst_name, lDstName);
+
+ status = H5Lcreate_hard((hid_t)cur_loc_id, lCurName, (hid_t)dst_loc_id, lDstName, (hid_t)create_id, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING_TWO(cur_name, lCurName, dst_name, lDstName);
+
+ if (status < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Lcreate_1hard */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lcreate_soft
+ * Signature: (Ljava/lang/String;JLjava/lang/String;JJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Lcreate_1soft(JNIEnv *env, jclass clss, jstring cur_name, jlong dst_loc_id,
+ jstring dst_name, jlong create_id, jlong access_id)
+{
+ herr_t status = -1;
+ const char *lCurName;
+ const char *lDstName;
+
+ PIN_JAVA_STRING_TWO0(cur_name, lCurName, dst_name, lDstName);
+
+ status = H5Lcreate_soft(lCurName, (hid_t)dst_loc_id, lDstName, (hid_t)create_id, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING_TWO(cur_name, lCurName, dst_name, lDstName);
+
+ if (status < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Lcreate_1soft */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ldelete
+ * Signature: (JLjava/lang/String;J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Ldelete(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong access_id)
+{
+ herr_t status = -1;
+ const char *lName;
+
+ PIN_JAVA_STRING0(name, lName);
+
+ status = H5Ldelete((hid_t)loc_id, lName, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, lName);
+
+ if (status < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Ldelete */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ldelete_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Ldelete_1by_1idx(JNIEnv *env, jclass clss, jlong loc_id, jstring name,
+ jint index_field, jint order, jlong link_n, jlong access_id)
+{
+ hsize_t n = (hsize_t)link_n;
+ herr_t status;
+ const char *lName;
+
+ PIN_JAVA_STRING0(name, lName);
+
+ status = H5Ldelete_by_idx((hid_t)loc_id, lName, (H5_index_t)index_field, (H5_iter_order_t)order, n, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, lName);
+
+ if (status < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Ldelete_1by_1idx */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lexists
+ * Signature: (JLjava/lang/String;J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Lexists(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong access_id)
+{
+ htri_t bval = JNI_FALSE;
+ const char *lName;
+
+ PIN_JAVA_STRING(name, lName, JNI_FALSE);
+
+ bval = H5Lexists((hid_t)loc_id, lName, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, lName);
+
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Lexists */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lget_info
+ * Signature: (JLjava/lang/String;J)Lhdf/hdf5lib/structs/H5L_info_t;
+ */
+JNIEXPORT jobject JNICALL
+Java_hdf_hdf5lib_H5_H5Lget_1info(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong access_id)
+{
+ jobject ret_obj = NULL;
+ jvalue args[5];
+ herr_t status;
+ H5L_info_t infobuf;
+ const char *lName;
+
+ PIN_JAVA_STRING(name, lName, NULL);
+
+ status = H5Lget_info((hid_t)loc_id, lName, &infobuf, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, lName);
+
+ if (status < 0) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+ args[0].i = infobuf.type;
+ args[1].z = infobuf.corder_valid;
+ args[2].j = infobuf.corder;
+ args[3].i = infobuf.cset;
+ if(infobuf.type==0)
+ args[4].j = (jlong)infobuf.u.address;
+ else
+ args[4].j = (jlong)infobuf.u.val_size;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5L_info_t", "(IZJIJ)V", args);
+ } /* end else */
+ return ret_obj;
+} /* end Java_hdf_hdf5lib_H5_H5Lget_1info */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lget_info_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)Lhdf/hdf5lib/structs/H5L_info_t;
+ */
+JNIEXPORT jobject JNICALL
+Java_hdf_hdf5lib_H5_H5Lget_1info_1by_1idx(JNIEnv *env, jclass clss, jlong loc_id, jstring name,
+ jint index_field, jint order, jlong link_n, jlong access_id)
+{
+ jobject ret_obj = NULL;
+ jvalue args[5];
+ herr_t status;
+ H5L_info_t infobuf;
+ const char *lName;
+
+ PIN_JAVA_STRING(name, lName, NULL);
+
+ status = H5Lget_info_by_idx((hid_t)loc_id, lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, &infobuf, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, lName);
+
+ if (status < 0) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+ args[0].i = infobuf.type;
+ args[1].z = infobuf.corder_valid;
+ args[2].j = infobuf.corder;
+ args[3].i = infobuf.cset;
+ if(infobuf.type==0)
+ args[4].j = (jlong)infobuf.u.address;
+ else
+ args[4].j = (jlong)infobuf.u.val_size;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5L_info_t", "(IZJIJ)V", args);
+    } /* end else */
+ return ret_obj;
+} /* end Java_hdf_hdf5lib_H5_H5Lget_1info_1by_1idx */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lget_name_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)Ljava/lang/String;
+ */
+JNIEXPORT jobject JNICALL
+Java_hdf_hdf5lib_H5_H5Lget_1name_1by_1idx(JNIEnv *env, jclass clss, jlong loc_id, jstring name,
+ jint index_field, jint order, jlong link_n, jlong access_id)
+{
+ jlong status_size;
+ jstring str = NULL;
+ size_t buf_size;
+ const char *lName;
+ char *lValue;
+
+ PIN_JAVA_STRING(name, lName, NULL);
+
+ /* get the length of the link name */
+ status_size = H5Lget_name_by_idx((hid_t)loc_id, lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, (char*)NULL, (size_t)0, H5P_DEFAULT);
+ if(status_size < 0) {
+ UNPIN_JAVA_STRING(name, lName);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ buf_size = (size_t)status_size + 1;/* add extra space for the null terminator */
+
+ lValue = (char*)HDmalloc(sizeof(char) * buf_size);
+ if (lValue == NULL) {
+ UNPIN_JAVA_STRING(name, lName);
+ h5outOfMemory(env, "H5Lget_name_by_idx: malloc failed ");
+ } /* end if */
+ else {
+ status_size = H5Lget_name_by_idx((hid_t)loc_id, lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, (char*)lValue, (size_t)buf_size, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, lName);
+
+ if (status_size < 0) {
+ HDfree(lValue);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR lValue);
+ HDfree(lValue);
+ if (str == NULL)
+ h5JNIFatalError(env, "H5Lget_name_by_idx: return string not created");
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ return str;
+} /* end Java_hdf_hdf5lib_H5_H5Lget_1name_1by_1idx */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lget_value
+ * Signature: (JLjava/lang/String;[Ljava/lang/String;J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Lget_1value(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jobjectArray link_value, jlong access_id)
+{
+ size_t buf_size;
+ herr_t status;
+ H5L_info_t infobuf;
+ const char *lName;
+ char *lValue;
+ const char *file_name;
+ const char *obj_name;
+ jstring str;
+
+ PIN_JAVA_STRING(name, lName, -1);
+
+ infobuf.type = -1;
+    /* get the length of the link value */
+ status = H5Lget_info((hid_t)loc_id, lName, &infobuf, H5P_DEFAULT);
+ if(status < 0) {
+ UNPIN_JAVA_STRING(name, lName);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ buf_size = infobuf.u.val_size + 1;/* add extra space for the null terminator */
+
+ if(infobuf.type == H5L_TYPE_HARD) {
+ UNPIN_JAVA_STRING(name, lName);
+ h5JNIFatalError(env, "H5Lget_val: link is hard type");
+ } /* end if */
+ else {
+ lValue = (char*)HDmalloc(sizeof(char) * buf_size);
+ if (lValue == NULL) {
+ UNPIN_JAVA_STRING(name, lName);
+ h5outOfMemory(env, "H5Lget_val: malloc failed");
+ } /* end if */
+ else {
+ status = H5Lget_val((hid_t)loc_id, lName, (void*)lValue, buf_size, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, lName);
+
+ if (status < 0) {
+ HDfree(lValue);
+ h5libraryError(env);
+ } /* end if */
+ else if(infobuf.type == H5L_TYPE_EXTERNAL) {
+ status = H5Lunpack_elink_val(lValue, (size_t)infobuf.u.val_size, (unsigned*)NULL, &file_name, &obj_name);
+ if (status < 0) {
+ HDfree(lValue);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR obj_name);
+ if (str == NULL) {
+ HDfree(lValue);
+ h5JNIFatalError(env, "H5Lget_val: return string not created");
+ } /* end if */
+ else {
+ ENVPTR->SetObjectArrayElement(ENVPAR link_value, 0, str);
+
+ str = ENVPTR->NewStringUTF(ENVPAR file_name);
+ if (str == NULL) {
+ HDfree(lValue);
+ h5JNIFatalError(env, "H5Lget_val: return string not created");
+ } /* end if */
+ else {
+ ENVPTR->SetObjectArrayElement(ENVPAR link_value, 1, str);
+ HDfree(lValue);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else if */
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR lValue);
+ if (str == NULL) {
+ /* exception -- fatal JNI error */
+ HDfree(lValue);
+ h5JNIFatalError(env, "H5Lget_val: return string not created");
+ } /* end if */
+ else {
+ ENVPTR->SetObjectArrayElement(ENVPAR link_value, 0, str);
+ HDfree(lValue);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return infobuf.type;
+} /* end Java_hdf_hdf5lib_H5_H5Lget_1value */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lget_value_by_idx
+ * Signature: (JLjava/lang/String;IIJ[Ljava/lang/String;J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Lget_1value_1by_1idx(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jint index_field, jint order,
+ jlong link_n, jobjectArray link_value, jlong access_id)
+{
+ herr_t status;
+ size_t buf_size;
+ H5L_info_t infobuf;
+ const char *lName;
+ void *lValue;
+ const char *file_name;
+ const char *obj_name;
+ jstring str;
+
+ PIN_JAVA_STRING(name, lName, -1);
+
+ infobuf.type = -1;
+    /* get the length of the link value */
+ status = H5Lget_info_by_idx((hid_t)loc_id, lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, &infobuf, (hid_t)access_id);
+ if(status < 0) {
+ UNPIN_JAVA_STRING(name, lName);
+ h5libraryError(env);
+ return -1;
+ } /* end if */
+ else {
+ buf_size = infobuf.u.val_size;
+ if(buf_size < 0) {
+ UNPIN_JAVA_STRING(name, lName);
+ h5libraryError(env);
+ return -1;
+ } /* end if */
+ else {
+ lValue = (void*)HDmalloc(buf_size);
+ if (lValue == NULL) {
+ UNPIN_JAVA_STRING(name, lName);
+ h5outOfMemory(env, "H5Lget_val_by_idx: malloc failed ");
+ return -1;
+ } /* end if */
+ else {
+ status = H5Lget_val_by_idx((hid_t)loc_id, lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, (void*)lValue, (size_t)buf_size, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, lName);
+
+ if (status < 0) {
+ HDfree(lValue);
+ h5libraryError(env);
+ } /* end if */
+ else if(infobuf.type == H5L_TYPE_EXTERNAL) {
+ status = H5Lunpack_elink_val((char*)lValue, (size_t)infobuf.u.val_size, (unsigned*)NULL, (const char**)&file_name, (const char**)&obj_name);
+ if (status < 0) {
+ HDfree(lValue);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR obj_name);
+ if (str == NULL) {
+ HDfree(lValue);
+ h5JNIFatalError(env, "H5Lget_val_by_idx: return string not created");
+ } /* end if */
+ else {
+ ENVPTR->SetObjectArrayElement(ENVPAR link_value, 0, str);
+
+ str = ENVPTR->NewStringUTF(ENVPAR file_name);
+ if (str == NULL) {
+ HDfree(lValue);
+ h5JNIFatalError(env, "H5Lget_val_by_idx: return string not created");
+ } /* end if */
+ else {
+ ENVPTR->SetObjectArrayElement(ENVPAR link_value, 1, str);
+ HDfree(lValue);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else if */
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR (char *)lValue);
+ if (str == NULL) {
+ HDfree(lValue);
+ h5JNIFatalError(env, "H5Lget_val_by_idx: return string not created");
+ } /* end if */
+ else {
+ ENVPTR->SetObjectArrayElement(ENVPAR link_value, 0, str);
+ HDfree(lValue);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ return infobuf.type;
+} /* end Java_hdf_hdf5lib_H5_H5Lget_1value_1by_1idx */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lmove
+ * Signature: (JLjava/lang/String;JLjava/lang/String;JJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Lmove(JNIEnv *env, jclass clss, jlong cur_loc_id, jstring cur_name,
+ jlong dst_loc_id, jstring dst_name, jlong create_id, jlong access_id)
+{
+ herr_t status = -1;
+ const char *lCurName;
+ const char *lDstName;
+
+ PIN_JAVA_STRING_TWO0(cur_name, lCurName, dst_name, lDstName);
+
+ status = H5Lmove((hid_t)cur_loc_id, lCurName, (hid_t)dst_loc_id, lDstName, (hid_t)create_id, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING_TWO(cur_name, lCurName, dst_name, lDstName);
+
+ if (status < 0)
+ h5libraryError(env);
+
+} /* end Java_hdf_hdf5lib_H5_H5Lmove */
+
+static herr_t
+H5L_iterate_cb(hid_t g_id, const char *name, const H5L_info_t *info, void *op_data) {
+ JNIEnv *cbenv;
+    jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jstring str;
+ jmethodID constructor;
+ jvalue args[5];
+ jobject cb_info_t = NULL;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) == 0) {
+ cls = CBENVPTR->GetObjectClass(CBENVPAR visit_callback);
+ if (cls != 0) {
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(JLjava/lang/String;Lhdf/hdf5lib/structs/H5L_info_t;Lhdf/hdf5lib/callbacks/H5L_iterate_t;)I");
+ if (mid != 0) {
+ str = CBENVPTR->NewStringUTF(CBENVPAR name);
+
+ args[0].i = info->type;
+ args[1].z = info->corder_valid;
+ args[2].j = info->corder;
+ args[3].i = info->cset;
+ if(info->type==0)
+ args[4].j = (jlong)info->u.address;
+ else
+ args[4].j = (jlong)info->u.val_size;
+ // get a reference to your class if you don't have it already
+ cls = CBENVPTR->FindClass(CBENVPAR "hdf/hdf5lib/structs/H5L_info_t");
+ if (cls != 0) {
+ // get a reference to the constructor; the name is <init>
+ constructor = CBENVPTR->GetMethodID(CBENVPAR cls, "<init>", "(IZJIJ)V");
+ if (constructor != 0) {
+ cb_info_t = CBENVPTR->NewObjectA(CBENVPAR cls, constructor, args);
+
+ status = CBENVPTR->CallIntMethod(CBENVPAR visit_callback, mid, g_id, str, cb_info_t, op_data);
+ } /* end if */
+ } /* end if */
+ } /* end if */
+ } /* end if */
+ } /* end if */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return status;
+} /* end H5L_iterate_cb */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lvisit
+ * Signature: (JIILjava/lang/Object;Ljava/lang/Object;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Lvisit(JNIEnv *env, jclass clss, jlong grp_id, jint idx_type, jint order,
+ jobject callback_op, jobject op_data)
+{
+ herr_t status = -1;
+
+ ENVPTR->GetJavaVM(ENVPAR &jvm);
+ visit_callback = callback_op;
+
+ if ((op_data == NULL) || (callback_op == NULL)) {
+ h5nullArgument(env, "H5Lvisit: op_data or callback_op is NULL");
+ } /* end if */
+ else {
+ status = H5Lvisit((hid_t)grp_id, (H5_index_t)idx_type, (H5_iter_order_t)order, (H5L_iterate_t)H5L_iterate_cb, (void*)op_data);
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return status;
+} /* end Java_hdf_hdf5lib_H5_H5Lvisit */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lvisit_by_name
+ * Signature: (JLjava/lang/String;IILjava/lang/Object;Ljava/lang/Object;J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Lvisit_1by_1name(JNIEnv *env, jclass clss, jlong grp_id, jstring name, jint idx_type, jint order,
+ jobject callback_op, jobject op_data, jlong access_id)
+{
+ herr_t status = -1;
+ const char *lName;
+
+ ENVPTR->GetJavaVM(ENVPAR &jvm);
+ visit_callback = callback_op;
+
+ if ((op_data == NULL) || (callback_op == NULL)) {
+ h5nullArgument(env, "H5Lvisit_by_name: op_data or callback_op is NULL");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(name, lName, -1);
+
+ status = H5Lvisit_by_name((hid_t)grp_id, lName, (H5_index_t)idx_type, (H5_iter_order_t)order, (H5L_iterate_t)H5L_iterate_cb, (void*)op_data, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, lName);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return status;
+} /* end Java_hdf_hdf5lib_H5_H5Lvisit_1by_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Literate
+ * Signature: (JIIJLjava/lang/Object;Ljava/lang/Object;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Literate(JNIEnv *env, jclass clss, jlong grp_id, jint idx_type, jint order,
+ jlong idx, jobject callback_op, jobject op_data)
+{
+ hsize_t start_idx = (hsize_t)idx;
+ herr_t status = -1;
+
+ ENVPTR->GetJavaVM(ENVPAR &jvm);
+ visit_callback = callback_op;
+
+ if ((op_data == NULL) || (callback_op == NULL)) {
+ h5nullArgument(env, "H5Literate: op_data or callback_op is NULL");
+ } /* end if */
+ else {
+ status = H5Literate((hid_t)grp_id, (H5_index_t)idx_type, (H5_iter_order_t)order, (hsize_t*)&start_idx, (H5L_iterate_t)H5L_iterate_cb, (void*)op_data);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return status;
+} /* end Java_hdf_hdf5lib_H5_H5Literate */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Literate_by_name
+ * Signature: (JLjava/lang/String;IIJLjava/lang/Object;Ljava/lang/Object;J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Literate_1by_1name(JNIEnv *env, jclass clss, jlong grp_id, jstring name, jint idx_type, jint order,
+ jlong idx, jobject callback_op, jobject op_data, jlong access_id)
+{
+ hsize_t start_idx = (hsize_t)idx;
+ herr_t status = -1;
+ const char *lName;
+
+ ENVPTR->GetJavaVM(ENVPAR &jvm);
+ visit_callback = callback_op;
+
+ if ((op_data == NULL) || (callback_op == NULL)) {
+ h5nullArgument(env, "H5Literate_by_name: op_data or callback_op is NULL");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(name, lName, -1);
+
+ status = H5Literate_by_name((hid_t)grp_id, lName, (H5_index_t)idx_type, (H5_iter_order_t)order, (hsize_t*)&start_idx, (H5L_iterate_t)H5L_iterate_cb, (void*)op_data, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, lName);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return status;
+} /* end Java_hdf_hdf5lib_H5_H5Literate_1by_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lis_registered
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Lis_1registered(JNIEnv *env, jclass clss, jint link_cls_id)
+{
+ htri_t ret_val = H5Lis_registered((H5L_type_t)link_cls_id);
+
+ if (ret_val < 0)
+ h5libraryError(env);
+
+ return (int)ret_val;
+} /* end Java_hdf_hdf5lib_H5_H5Lis_1registered */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lunregister
+ * Signature: (I)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Lunregister(JNIEnv *env, jclass clss, jint link_cls_id)
+{
+ if (H5Lunregister((H5L_type_t)link_cls_id) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Lunregister */
+
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
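
A minimal, hypothetical Java-side counterpart to the callback dispatch above -- assuming the hdf.hdf5lib.callbacks interface names H5L_iterate_cb and H5L_iterate_t and the public H5.H5Lvisit wrapper, none of which are defined in this file -- could look like the following sketch:

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.hdf5lib.callbacks.H5L_iterate_cb;   // assumed interface name
    import hdf.hdf5lib.callbacks.H5L_iterate_t;    // assumed interface name
    import hdf.hdf5lib.structs.H5L_info_t;

    public class VisitLinksSketch implements H5L_iterate_cb {
        /* Must match the method H5L_iterate_cb() resolves above:
         * int callback(long, String, H5L_info_t, H5L_iterate_t) */
        public int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data) {
            System.out.println("visited link: " + name);
            return 0;                  /* 0 = continue iterating */
        }

        public static void visitAll(long file_id, H5L_iterate_t op_data) throws Exception {
            /* op_data and the callback must both be non-null; the H5Lvisit
             * native above raises a null-argument exception otherwise. */
            H5.H5Lvisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC,
                        new VisitLinksSketch(), op_data);
        }
    }
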
diff --git a/java/src/jni/h5lImp.h b/java/src/jni/h5lImp.h
new file mode 100644
index 0000000..f0a9a63
--- /dev/null
+++ b/java/src/jni/h5lImp.h
@@ -0,0 +1,183 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5L */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5L
+#define _Included_hdf_hdf5lib_H5_H5L
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lcopy
+ * Signature: (JLjava/lang/String;JLjava/lang/String;JJ)V
+ */
+ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Lcopy
+ (JNIEnv*, jclass, jlong, jstring, jlong, jstring, jlong, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lcreate_external
+ * Signature: (Ljava/lang/String;Ljava/lang/String;JLjava/lang/String;JJ)V
+ */
+ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Lcreate_1external
+ (JNIEnv*, jclass, jstring, jstring, jlong, jstring, jlong, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lcreate_hard
+ * Signature: (JLjava/lang/String;JLjava/lang/String;JJ)V
+ */
+ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Lcreate_1hard
+ (JNIEnv*, jclass, jlong, jstring, jlong, jstring, jlong, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lcreate_soft
+ * Signature: (Ljava/lang/String;JLjava/lang/String;JJ)V
+ */
+ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Lcreate_1soft
+ (JNIEnv*, jclass, jstring, jlong, jstring, jlong, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ldelete
+ * Signature: (JLjava/lang/String;J)V
+ */
+ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Ldelete
+ (JNIEnv*, jclass, jlong, jstring, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ldelete_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)V
+ */
+ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Ldelete_1by_1idx
+ (JNIEnv*, jclass, jlong, jstring, jint, jint, jlong, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lexists
+ * Signature: (JLjava/lang/String;J)Z
+ */
+ JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Lexists
+ (JNIEnv*, jclass, jlong, jstring, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lget_info
+ * Signature: (JLjava/lang/String;J)Lhdf/hdf5lib/structs/H5L_info_t;
+ */
+ JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Lget_1info
+ (JNIEnv*, jclass, jlong, jstring, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lget_info_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)Lhdf/hdf5lib/structs/H5L_info_t;
+ */
+ JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Lget_1info_1by_1idx
+ (JNIEnv*, jclass, jlong, jstring, jint, jint, jlong, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lget_name_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)Ljava/lang/String;
+ */
+ JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Lget_1name_1by_1idx
+ (JNIEnv*, jclass, jlong, jstring, jint, jint, jlong, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lget_value
+ * Signature: (JLjava/lang/String;[Ljava/lang/String;J)I
+ */
+ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Lget_1value
+ (JNIEnv*, jclass, jlong, jstring, jobjectArray, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lget_value_by_idx
+ * Signature: (JLjava/lang/String;IIJ[Ljava/lang/String;J)I
+ */
+ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Lget_1value_1by_1idx
+ (JNIEnv*, jclass, jlong, jstring, jint, jint, jlong, jobjectArray, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lmove
+ * Signature: (JLjava/lang/String;JLjava/lang/String;JJ)V
+ */
+ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Lmove
+ (JNIEnv*, jclass, jlong, jstring, jlong, jstring, jlong, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lvisit
+ * Signature: (JIILjava/lang/Object;Ljava/lang/Object;)I
+ */
+ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Lvisit
+ (JNIEnv*, jclass, jlong, jint, jint, jobject, jobject);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lvisit_by_name
+ * Signature: (JLjava/lang/String;IILjava/lang/Object;Ljava/lang/Object;J)I
+ */
+ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Lvisit_1by_1name
+ (JNIEnv*, jclass, jlong, jstring, jint, jint, jobject, jobject, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Literate
+ * Signature: (JIIJLjava/lang/Object;Ljava/lang/Object;)I
+ */
+ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Literate
+ (JNIEnv*, jclass, jlong, jint, jint, jlong, jobject, jobject);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Literate_by_name
+ * Signature: (JLjava/lang/String;IIJLjava/lang/Object;Ljava/lang/Object;J)I
+ */
+ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Literate_1by_1name
+ (JNIEnv*, jclass, jlong, jstring, jint, jint, jlong, jobject, jobject, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lis_registered
+ * Signature: (I)I
+ */
+ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Lis_1registered
+ (JNIEnv*, jclass, jint);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Lunregister
+ * Signature: (I)V
+ */
+ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Lunregister
+ (JNIEnv*, jclass, jint);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5L */
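
The declarations above correspond to the link methods exposed on hdf.hdf5lib.H5. A short, hypothetical usage sketch based only on the JNI signatures listed in this header (HDF5Constants.H5P_DEFAULT is assumed to carry the default property-list id; the paths are placeholders):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.hdf5lib.structs.H5L_info_t;

    public class LinkBasicsSketch {
        /* Create a soft link, check it, query its info, then rename it. */
        public static void demo(long file_id) throws Exception {
            H5.H5Lcreate_soft("/data/original", file_id, "/data/alias",
                              HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);

            boolean present = H5.H5Lexists(file_id, "/data/alias", HDF5Constants.H5P_DEFAULT);

            // struct built by the H5Lget_info native in h5lImp.c
            H5L_info_t info = H5.H5Lget_info(file_id, "/data/alias", HDF5Constants.H5P_DEFAULT);

            if (present)
                H5.H5Lmove(file_id, "/data/alias", file_id, "/data/alias_renamed",
                           HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        }
    }
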
diff --git a/java/src/jni/h5oImp.c b/java/src/jni/h5oImp.c
new file mode 100644
index 0000000..c38ec9a
--- /dev/null
+++ b/java/src/jni/h5oImp.c
@@ -0,0 +1,766 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ *     http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5jni.h"
+#include "h5oImp.h"
+
+extern JavaVM *jvm;
+extern jobject visit_callback;
+
+/********************/
+/* Local Prototypes */
+/********************/
+
+static herr_t H5O_iterate_cb(hid_t g_id, const char *name, const H5O_info_t *info, void *op_data);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Oopen
+ * Signature: (JLjava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Oopen(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong access_plist_id)
+{
+ hid_t status;
+ const char *oName;
+
+ PIN_JAVA_STRING(name, oName, -1);
+
+ status = H5Oopen((hid_t)loc_id, oName, (hid_t)access_plist_id );
+
+ UNPIN_JAVA_STRING(name, oName);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jlong)status;
+} /* end Java_hdf_hdf5lib_H5__1H5Oopen */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Oclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5__1H5Oclose(JNIEnv *env, jclass clss, jlong object_id)
+{
+ herr_t retVal = H5Oclose((hid_t)object_id);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Oclose */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ocopy
+ * Signature: (JLjava/lang/String;JLjava/lang/String;JJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Ocopy(JNIEnv *env, jclass clss, jlong cur_loc_id, jstring cur_name,
+ jlong dst_loc_id, jstring dst_name, jlong create_id, jlong access_id)
+{
+ herr_t status = -1;
+ const char *lCurName;
+ const char *lDstName;
+
+ PIN_JAVA_STRING_TWO0(cur_name, lCurName, dst_name, lDstName);
+
+ status = H5Ocopy((hid_t)cur_loc_id, lCurName, (hid_t)dst_loc_id, lDstName, (hid_t)create_id, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING_TWO(cur_name, lCurName, dst_name, lDstName);
+
+ if (status < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Ocopy */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oget_info
+ * Signature: (J)Lhdf/hdf5lib/structs/H5O_info_t;
+ */
+JNIEXPORT jobject JNICALL
+Java_hdf_hdf5lib_H5_H5Oget_1info(JNIEnv *env, jclass clss, jlong loc_id)
+{
+ herr_t status = -1;
+ H5O_info_t infobuf;
+ jvalue args[12];
+ jobject hdrinfobuf;
+ jobject ihinfobuf1;
+ jobject ihinfobuf2;
+ jobject ret_obj = NULL;
+
+ status = H5Oget_info((hid_t)loc_id, &infobuf);
+
+ if (status < 0) {
+ h5libraryError(env);
+ return NULL;
+ } /* end if */
+
+ args[0].i = (jint)infobuf.hdr.version;
+ args[1].i = (jint)infobuf.hdr.nmesgs;
+ args[2].i = (jint)infobuf.hdr.nchunks;
+ args[3].i = (jint)infobuf.hdr.flags;
+ args[4].j = (jlong)infobuf.hdr.space.total;
+ args[5].j = (jlong)infobuf.hdr.space.meta;
+ args[6].j = (jlong)infobuf.hdr.space.mesg;
+ args[7].j = (jlong)infobuf.hdr.space.free;
+ args[8].j = (jlong)infobuf.hdr.mesg.present;
+ args[9].j = (jlong)infobuf.hdr.mesg.shared;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5O_hdr_info_t", "(IIIIJJJJJJ)V", args);
+ hdrinfobuf = ret_obj;
+
+ args[0].j = (jlong)infobuf.meta_size.obj.index_size;
+ args[1].j = (jlong)infobuf.meta_size.obj.heap_size;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5_ih_info_t", "(JJ)V", args);
+ ihinfobuf1 = ret_obj;
+ args[0].j = (jlong)infobuf.meta_size.attr.index_size;
+ args[1].j = (jlong)infobuf.meta_size.attr.heap_size;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5_ih_info_t", "(JJ)V", args);
+ ihinfobuf2 = ret_obj;
+
+ args[0].j = (jlong)infobuf.fileno;
+ args[1].j = (jlong)infobuf.addr;
+ args[2].i = infobuf.type;
+ args[3].i = (jint)infobuf.rc;
+ args[4].j = (jlong)infobuf.num_attrs;
+ args[5].j = infobuf.atime;
+ args[6].j = infobuf.mtime;
+ args[7].j = infobuf.ctime;
+ args[8].j = infobuf.btime;
+ args[9].l = hdrinfobuf;
+ args[10].l = ihinfobuf1;
+ args[11].l = ihinfobuf2;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5O_info_t", "(JJIIJJJJJLhdf/hdf5lib/structs/H5O_hdr_info_t;Lhdf/hdf5lib/structs/H5_ih_info_t;Lhdf/hdf5lib/structs/H5_ih_info_t;)V", args);
+
+ return ret_obj;
+} /* end Java_hdf_hdf5lib_H5_H5Oget_1info */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oget_info_by_name
+ * Signature: (JLjava/lang/String;J)Lhdf/hdf5lib/structs/H5O_info_t;
+ */
+JNIEXPORT jobject JNICALL
+Java_hdf_hdf5lib_H5_H5Oget_1info_1by_1name(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong access_id)
+{
+ const char *lName;
+ herr_t status = -1;
+ H5O_info_t infobuf;
+ jvalue args[12];
+ jobject hdrinfobuf;
+ jobject ihinfobuf1;
+ jobject ihinfobuf2;
+ jobject ret_obj = NULL;
+
+ PIN_JAVA_STRING(name, lName, NULL);
+
+ status = H5Oget_info_by_name((hid_t)loc_id, lName, &infobuf, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, lName);
+
+ if (status < 0) {
+ h5libraryError(env);
+ return NULL;
+ } /* end if */
+
+ args[0].i = (jint)infobuf.hdr.version;
+ args[1].i = (jint)infobuf.hdr.nmesgs;
+ args[2].i = (jint)infobuf.hdr.nchunks;
+ args[3].i = (jint)infobuf.hdr.flags;
+ args[4].j = (jlong)infobuf.hdr.space.total;
+ args[5].j = (jlong)infobuf.hdr.space.meta;
+ args[6].j = (jlong)infobuf.hdr.space.mesg;
+ args[7].j = (jlong)infobuf.hdr.space.free;
+ args[8].j = (jlong)infobuf.hdr.mesg.present;
+ args[9].j = (jlong)infobuf.hdr.mesg.shared;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5O_hdr_info_t", "(IIIIJJJJJJ)V", args);
+ hdrinfobuf = ret_obj;
+
+ args[0].j = (jlong)infobuf.meta_size.obj.index_size;
+ args[1].j = (jlong)infobuf.meta_size.obj.heap_size;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5_ih_info_t", "(JJ)V", args);
+ ihinfobuf1 = ret_obj;
+ args[0].j = (jlong)infobuf.meta_size.attr.index_size;
+ args[1].j = (jlong)infobuf.meta_size.attr.heap_size;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5_ih_info_t", "(JJ)V", args);
+ ihinfobuf2 = ret_obj;
+
+ args[0].j = (jlong)infobuf.fileno;
+ args[1].j = (jlong)infobuf.addr;
+ args[2].i = infobuf.type;
+ args[3].i = (jint)infobuf.rc;
+ args[4].j = (jlong)infobuf.num_attrs;
+ args[5].j = infobuf.atime;
+ args[6].j = infobuf.mtime;
+ args[7].j = infobuf.ctime;
+ args[8].j = infobuf.btime;
+ args[9].l = hdrinfobuf;
+ args[10].l = ihinfobuf1;
+ args[11].l = ihinfobuf2;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5O_info_t", "(JJIIJJJJJLhdf/hdf5lib/structs/H5O_hdr_info_t;Lhdf/hdf5lib/structs/H5_ih_info_t;Lhdf/hdf5lib/structs/H5_ih_info_t;)V", args);
+
+ return ret_obj;
+} /* end Java_hdf_hdf5lib_H5_H5Oget_1info_1by_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oget_info_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)Lhdf/hdf5lib/structs/H5O_info_t;
+ */
+JNIEXPORT jobject JNICALL
+Java_hdf_hdf5lib_H5_H5Oget_1info_1by_1idx(JNIEnv *env, jclass clss, jlong loc_id,
+ jstring name, jint index_field, jint order, jlong link_n, jlong access_id)
+{
+ const char *lName;
+ herr_t status;
+ H5O_info_t infobuf;
+ jvalue args[12];
+ jobject hdrinfobuf;
+ jobject ihinfobuf1;
+ jobject ihinfobuf2;
+ jobject ret_obj = NULL;
+
+ PIN_JAVA_STRING(name, lName, NULL);
+
+ status = H5Oget_info_by_idx((hid_t)loc_id, lName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, &infobuf, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, lName);
+
+ if (status < 0) {
+ h5libraryError(env);
+ return NULL;
+ } /* end if */
+
+ args[0].i = (jint)infobuf.hdr.version;
+ args[1].i = (jint)infobuf.hdr.nmesgs;
+ args[2].i = (jint)infobuf.hdr.nchunks;
+ args[3].i = (jint)infobuf.hdr.flags;
+ args[4].j = (jlong)infobuf.hdr.space.total;
+ args[5].j = (jlong)infobuf.hdr.space.meta;
+ args[6].j = (jlong)infobuf.hdr.space.mesg;
+ args[7].j = (jlong)infobuf.hdr.space.free;
+ args[8].j = (jlong)infobuf.hdr.mesg.present;
+ args[9].j = (jlong)infobuf.hdr.mesg.shared;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5O_hdr_info_t", "(IIIIJJJJJJ)V", args);
+ hdrinfobuf = ret_obj;
+
+ args[0].j = (jlong)infobuf.meta_size.obj.index_size;
+ args[1].j = (jlong)infobuf.meta_size.obj.heap_size;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5_ih_info_t", "(JJ)V", args);
+ ihinfobuf1 = ret_obj;
+ args[0].j = (jlong)infobuf.meta_size.attr.index_size;
+ args[1].j = (jlong)infobuf.meta_size.attr.heap_size;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5_ih_info_t", "(JJ)V", args);
+ ihinfobuf2 = ret_obj;
+
+ args[0].j = (jlong)infobuf.fileno;
+ args[1].j = (jlong)infobuf.addr;
+ args[2].i = infobuf.type;
+ args[3].i = (jint)infobuf.rc;
+ args[4].j = (jlong)infobuf.num_attrs;
+ args[5].j = infobuf.atime;
+ args[6].j = infobuf.mtime;
+ args[7].j = infobuf.ctime;
+ args[8].j = infobuf.btime;
+ args[9].l = hdrinfobuf;
+ args[10].l = ihinfobuf1;
+ args[11].l = ihinfobuf2;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5O_info_t", "(JJIIJJJJJLhdf/hdf5lib/structs/H5O_hdr_info_t;Lhdf/hdf5lib/structs/H5_ih_info_t;Lhdf/hdf5lib/structs/H5_ih_info_t;)V", args);
+
+ return ret_obj;
+} /* end Java_hdf_hdf5lib_H5_H5Oget_1info_1by_1idx */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Olink
+ * Signature: (JJLjava/lang/String;JJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Olink(JNIEnv *env, jclass clss, jlong cur_loc_id, jlong dst_loc_id,
+ jstring dst_name, jlong create_id, jlong access_id)
+{
+ herr_t status = -1;
+ const char *lDstName;
+
+ PIN_JAVA_STRING0(dst_name, lDstName);
+
+ status = H5Olink((hid_t)cur_loc_id, (hid_t)dst_loc_id, lDstName, (hid_t)create_id, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(dst_name, lDstName);
+
+ if (status < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Olink */
+
+static herr_t
+H5O_iterate_cb(hid_t g_id, const char *name, const H5O_info_t *info, void *op_data)
+{
+ JNIEnv *cbenv;
+ jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jstring str;
+ jmethodID constructor;
+ jvalue args[12];
+ jobject hdrinfobuf;
+ jobject ihinfobuf1;
+ jobject ihinfobuf2;
+ jobject cb_info_t = NULL;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) != 0) {
+ /* printf("JNI H5O_iterate_cb error: AttachCurrentThread failed\n"); */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+ cls = CBENVPTR->GetObjectClass(CBENVPAR visit_callback);
+ if (cls == 0) {
+ /* printf("JNI H5O_iterate_cb error: GetObjectClass failed\n"); */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(JLjava/lang/String;Lhdf/hdf5lib/structs/H5O_info_t;Lhdf/hdf5lib/callbacks/H5O_iterate_t;)I");
+ if (mid == 0) {
+ /* printf("JNI H5O_iterate_cb error: GetMethodID failed\n"); */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+ str = CBENVPTR->NewStringUTF(CBENVPAR name);
+
+ args[0].i = (jint)info->hdr.version;
+ args[1].i = (jint)info->hdr.nmesgs;
+ args[2].i = (jint)info->hdr.nchunks;
+ args[3].i = (jint)info->hdr.flags;
+ args[4].j = (jlong)info->hdr.space.total;
+ args[5].j = (jlong)info->hdr.space.meta;
+ args[6].j = (jlong)info->hdr.space.mesg;
+ args[7].j = (jlong)info->hdr.space.free;
+ args[8].j = (jlong)info->hdr.mesg.present;
+ args[9].j = (jlong)info->hdr.mesg.shared;
+ // get a reference to the H5_hdr_info_t class
+ cls = CBENVPTR->FindClass(CBENVPAR "hdf/hdf5lib/structs/H5O_hdr_info_t");
+ if (cls == 0) {
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+ // get a reference to the constructor; the name is <init>
+ constructor = CBENVPTR->GetMethodID(CBENVPAR cls, "<init>", "(IIIIJJJJJJ)V");
+ if (constructor == 0) {
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+ hdrinfobuf = CBENVPTR->NewObjectA(CBENVPAR cls, constructor, args);
+
+ args[0].j = (jlong)info->meta_size.obj.index_size;
+ args[1].j = (jlong)info->meta_size.obj.heap_size;
+ // get a reference to the H5_ih_info_t class
+ cls = CBENVPTR->FindClass(CBENVPAR "hdf/hdf5lib/structs/H5_ih_info_t");
+ if (cls == 0) {
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+ // get a reference to the constructor; the name is <init>
+ constructor = CBENVPTR->GetMethodID(CBENVPAR cls, "<init>", "(JJ)V");
+ if (constructor == 0) {
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+ ihinfobuf1 = CBENVPTR->NewObjectA(CBENVPAR cls, constructor, args);
+ args[0].j = (jlong)info->meta_size.attr.index_size;
+ args[1].j = (jlong)info->meta_size.attr.heap_size;
+ ihinfobuf2 = CBENVPTR->NewObjectA(CBENVPAR cls, constructor, args);
+
+ args[0].j = (jlong)info->fileno;
+ args[1].j = (jlong)info->addr;
+ args[2].i = info->type;
+ args[3].i = (jint)info->rc;
+ args[4].j = (jlong)info->num_attrs;
+ args[5].j = info->atime;
+ args[6].j = info->mtime;
+ args[7].j = info->ctime;
+ args[8].j = info->btime;
+ args[9].l = hdrinfobuf;
+ args[10].l = ihinfobuf1;
+ args[11].l = ihinfobuf2;
+ // get a reference to the H5O_info_t class
+ cls = CBENVPTR->FindClass(CBENVPAR "hdf/hdf5lib/structs/H5O_info_t");
+ if (cls == 0) {
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+ // get a reference to the constructor; the name is <init>
+ constructor = CBENVPTR->GetMethodID(CBENVPAR cls, "<init>", "(JJIIJJJJJLhdf/hdf5lib/structs/H5O_hdr_info_t;Lhdf/hdf5lib/structs/H5_ih_info_t;Lhdf/hdf5lib/structs/H5_ih_info_t;)V");
+ if (constructor == 0) {
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return -1;
+ } /* end if */
+ cb_info_t = CBENVPTR->NewObjectA(CBENVPAR cls, constructor, args);
+
+ status = CBENVPTR->CallIntMethod(CBENVPAR visit_callback, mid, g_id, str, cb_info_t, op_data);
+
+ JVMPTR->DetachCurrentThread(JVMPAR);
+
+ return status;
+} /* end H5O_iterate_cb */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ovisit
+ * Signature: (JIILjava/lang/Object;Ljava/lang/Object;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Ovisit(JNIEnv *env, jclass clss, jlong grp_id, jint idx_type, jint order,
+ jobject callback_op, jobject op_data)
+{
+ herr_t status = -1;
+
+ ENVPTR->GetJavaVM(ENVPAR &jvm);
+ visit_callback = callback_op;
+
+ if (op_data == NULL) {
+ h5nullArgument(env, "H5Ovisit: op_data is NULL");
+ return -1;
+ } /* end if */
+ if (callback_op == NULL) {
+ h5nullArgument(env, "H5Ovisit: callback_op is NULL");
+ return -1;
+ } /* end if */
+
+ status = H5Ovisit((hid_t)grp_id, (H5_index_t)idx_type, (H5_iter_order_t)order, (H5O_iterate_t)H5O_iterate_cb, (void*)op_data);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return status;
+} /* end Java_hdf_hdf5lib_H5_H5Ovisit */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ovisit_by_name
+ * Signature: (JLjava/lang/String;IILjava/lang/Object;Ljava/lang/Object;J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Ovisit_1by_1name(JNIEnv *env, jclass clss, jlong grp_id, jstring name, jint idx_type, jint order,
+ jobject callback_op, jobject op_data, jlong access_id)
+{
+ herr_t status = -1;
+ const char *lName;
+
+ ENVPTR->GetJavaVM(ENVPAR &jvm);
+ visit_callback = callback_op;
+
+ if (op_data == NULL) {
+ h5nullArgument(env, "H5Ovisit_by_name: op_data is NULL");
+ return -1;
+ } /* end if */
+ if (callback_op == NULL) {
+ h5nullArgument(env, "H5Ovisit_by_name: callback_op is NULL");
+ return -1;
+ } /* end if */
+
+ PIN_JAVA_STRING(name, lName, -1);
+
+ status = H5Ovisit_by_name((hid_t)grp_id, lName, (H5_index_t)idx_type, (H5_iter_order_t)order, (H5O_iterate_t)H5O_iterate_cb, (void*)op_data, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, lName);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return status;
+} /* end Java_hdf_hdf5lib_H5_H5Ovisit_1by_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oset_comment
+ * Signature: (JLjava/lang/String;)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Oset_1comment(JNIEnv *env, jclass clss, jlong loc_id, jstring comment)
+{
+ herr_t status = -1;
+ const char *oComment;
+ jboolean isCopy;
+
+ if (comment == NULL) {
+ oComment = NULL;
+ } /* end if */
+ else {
+ oComment = ENVPTR->GetStringUTFChars(ENVPAR comment, &isCopy);
+ if (oComment == NULL) {
+ h5JNIFatalError( env, "H5Oset_comment: comment not pinned");
+ return;
+ } /* end if */
+ } /* end else */
+
+ status = H5Oset_comment((hid_t)loc_id, oComment);
+
+ if(oComment)
+ ENVPTR->ReleaseStringUTFChars(ENVPAR comment, oComment);
+
+ if (status < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Oset_1comment */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oset_comment_by_name
+ * Signature: (JLjava/lang/String;Ljava/lang/String;J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Oset_1comment_1by_1name(JNIEnv *env, jclass clss, jlong loc_id,
+ jstring name, jstring comment, jlong access_id)
+{
+ herr_t status = -1;
+ const char *oName;
+ const char *oComment;
+
+ PIN_JAVA_STRING0(name, oName);
+
+ if (comment == NULL) {
+ oComment = NULL;
+ } /* end if */
+ else {
+ jboolean isCopy;
+ oComment = ENVPTR->GetStringUTFChars(ENVPAR comment, &isCopy);
+ if (oComment == NULL) {
+ UNPIN_JAVA_STRING(name, oName);
+ h5JNIFatalError( env, "H5Oset_comment_by_name: comment not pinned");
+ return;
+ } /* end if */
+ } /* end else */
+
+ status = H5Oset_comment_by_name((hid_t)loc_id, oName, oComment, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, oName);
+
+ if(oComment)
+ ENVPTR->ReleaseStringUTFChars(ENVPAR comment, oComment);
+
+ if (status < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Oset_1comment_1by_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oget_comment
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_hdf_hdf5lib_H5_H5Oget_1comment(JNIEnv *env, jclass clss, jlong loc_id)
+{
+ char *oComment;
+ ssize_t buf_size;
+ ssize_t status;
+ jstring str = NULL;
+
+ /* get the length of the comment */
+ buf_size = H5Oget_comment((hid_t)loc_id, NULL, 0);
+ if (buf_size < 0) {
+ h5badArgument( env, "H5Oget_comment: buf_size < 0");
+ return NULL;
+ } /* end if */
+ if (buf_size == 0) {
+ return NULL;
+ } /* end if */
+
+ buf_size++; /* add extra space for the null terminator */
+ oComment = (char *)HDmalloc(sizeof(char) * (size_t)buf_size);
+ if (oComment == NULL) {
+ /* exception -- out of memory */
+ h5outOfMemory( env, "H5Oget_comment: malloc failed");
+ return NULL;
+ } /* end if */
+
+ status = H5Oget_comment((hid_t)loc_id, oComment, (size_t)buf_size);
+
+ if (status >= 0) {
+ /* may throw OutOfMemoryError */
+ str = ENVPTR->NewStringUTF(ENVPAR oComment);
+ HDfree(oComment);
+ if (str == NULL) {
+ h5JNIFatalError( env, "H5Oget_comment: return string not allocated");
+ } /* end if */
+ } /* end if */
+ else {
+ HDfree(oComment);
+ h5libraryError(env);
+ } /* end else */
+
+ return (jstring)str;
+} /* end Java_hdf_hdf5lib_H5_H5Oget_1comment */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oget_comment_by_name
+ * Signature: (JLjava/lang/String;J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_hdf_hdf5lib_H5_H5Oget_1comment_1by_1name(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong access_id)
+{
+ char *oComment;
+ const char *oName;
+ ssize_t buf_size;
+ ssize_t status;
+ jstring str = NULL;
+
+ PIN_JAVA_STRING(name, oName, NULL);
+
+ /* get the length of the comment */
+ buf_size = H5Oget_comment_by_name((hid_t)loc_id, oName, NULL, 0, (hid_t)access_id);
+ if (buf_size < 0) {
+ UNPIN_JAVA_STRING(name, oName);
+ h5badArgument( env, "H5Oget_comment_by_name: buf_size < 0");
+ return NULL;
+ } /* end if */
+ if (buf_size == 0) {
+ UNPIN_JAVA_STRING(name, oName);
+ return NULL;
+ } /* end if */
+
+ buf_size++; /* add extra space for the null terminator */
+ oComment = (char *)HDmalloc(sizeof(char) * (size_t)buf_size);
+ if (oComment == NULL) {
+ UNPIN_JAVA_STRING(name, oName);
+ h5outOfMemory( env, "H5Oget_comment_by_name: malloc failed");
+ return NULL;
+ } /* end if */
+
+ status = H5Oget_comment_by_name((hid_t)loc_id, oName, oComment, (size_t)buf_size, (hid_t)access_id);
+ UNPIN_JAVA_STRING(name, oName);
+
+ if (status >= 0) {
+ /* may throw OutOfMemoryError */
+ str = ENVPTR->NewStringUTF(ENVPAR oComment);
+ HDfree(oComment);
+ if (str == NULL) {
+ h5JNIFatalError( env, "H5Oget_comment_by_name: return string not allocated");
+ } /* end if */
+ } /* end if */
+ else {
+ HDfree(oComment);
+ h5libraryError(env);
+ } /* end else */
+
+ return (jstring)str;
+} /* end Java_hdf_hdf5lib_H5_H5Oget_1comment_1by_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oexists_by_name
+ * Signature: (JLjava/lang/String;J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Oexists_1by_1name(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong access_id)
+{
+ htri_t bval = JNI_FALSE;
+ const char *oName;
+
+ PIN_JAVA_STRING(name, oName, JNI_FALSE);
+
+ bval = H5Oexists_by_name((hid_t)loc_id, oName, (hid_t)access_id);
+
+ UNPIN_JAVA_STRING(name, oName);
+
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Oexists_1by_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Odecr_refcount
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Odecr_1refcount(JNIEnv *env, jclass clss, jlong object_id)
+{
+ if (H5Odecr_refcount((hid_t)object_id) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Odecr_1refcount */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oincr_refcount
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Oincr_1refcount(JNIEnv *env, jclass clss, jlong object_id)
+{
+ if (H5Oincr_refcount((hid_t)object_id) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Oincr_1refcount */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Oopen_by_addr
+ * Signature: (JJ)J;
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Oopen_1by_1addr(JNIEnv *env, jclass clss, jlong loc_id, jlong addr)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Oopen_by_addr((hid_t)loc_id, (haddr_t)addr );
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Oopen_1by_1addr */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Oopen_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)J;
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Oopen_1by_1idx(JNIEnv *env, jclass clss, jlong loc_id, jstring name,
+ jint index_field, jint order, jlong link_n, jlong lapl_id)
+{
+ hid_t retVal = -1;
+ const char *oName;
+
+ PIN_JAVA_STRING(name, oName, -1);
+
+ retVal = H5Oopen_by_idx((hid_t)loc_id, oName, (H5_index_t)index_field, (H5_iter_order_t)order, (hsize_t)link_n, (hid_t)lapl_id );
+
+ UNPIN_JAVA_STRING(name, oName);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Oopen_1by_1idx */
+
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
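
The comment and existence wrappers above translate into small Java calls. A hypothetical sketch of their use (H5.H5Oexists_by_name, H5.H5Oset_comment, and H5.H5Oget_comment are assumed to be the public names backing the natives in this file; the path is a placeholder):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class ObjectCommentSketch {
        public static void demo(long file_id, long dset_id) throws Exception {
            /* Maps to Java_hdf_hdf5lib_H5_H5Oexists_1by_1name above. */
            boolean found = H5.H5Oexists_by_name(file_id, "/group/dataset",
                                                 HDF5Constants.H5P_DEFAULT);

            /* The native accepts a null comment and passes it straight
             * through to H5Oset_comment. */
            H5.H5Oset_comment(dset_id, "calibrated copy");

            /* Returns null when the object has no comment (buf_size == 0 above). */
            String comment = H5.H5Oget_comment(dset_id);

            System.out.println(found + ": " + comment);
        }
    }
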
diff --git a/java/src/jni/h5oImp.h b/java/src/jni/h5oImp.h
new file mode 100644
index 0000000..e6889b2
--- /dev/null
+++ b/java/src/jni/h5oImp.h
@@ -0,0 +1,175 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5O */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5O
+#define _Included_hdf_hdf5lib_H5_H5O
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Oopen
+ * Signature: (JLjava/lang/String;J)J
+ */
+ JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Oopen
+ (JNIEnv*, jclass, jlong, jstring, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Oclose
+ * Signature: (J)I
+ */
+ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5__1H5Oclose
+ (JNIEnv*, jclass, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ocopy
+ * Signature: (JLjava/lang/String;JLjava/lang/String;JJ)V
+ */
+ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Ocopy
+ (JNIEnv*, jclass, jlong, jstring, jlong, jstring, jlong, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oget_info
+ * Signature: (J)Lhdf/hdf5lib/structs/H5O_info_t;
+ */
+ JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Oget_1info
+ (JNIEnv*, jclass, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oget_info_by_name
+ * Signature: (JLjava/lang/String;J)Lhdf/hdf5lib/structs/H5O_info_t;
+ */
+ JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Oget_1info_1by_1name
+ (JNIEnv*, jclass, jlong, jstring, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oget_info_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)Lhdf/hdf5lib/structs/H5O_info_t;
+ */
+ JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Oget_1info_1by_1idx
+ (JNIEnv*, jclass, jlong, jstring, jint, jint, jlong, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Olink
+ * Signature: (JJLjava/lang/String;JJ)V
+ */
+ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Olink
+ (JNIEnv*, jclass, jlong, jlong, jstring, jlong, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ovisit
+ * Signature: (JIILjava/lang/Object;Ljava/lang/Object;)I
+ */
+ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Ovisit
+ (JNIEnv*, jclass, jlong, jint, jint, jobject, jobject);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Ovisit_by_name
+ * Signature: (JLjava/lang/String;IILjava/lang/Object;Ljava/lang/Object;J)I
+ */
+ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Ovisit_1by_1name
+ (JNIEnv*, jclass, jlong, jstring, jint, jint, jobject, jobject, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oset_comment
+ * Signature: (JLjava/lang/String;)V
+ */
+ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Oset_1comment
+ (JNIEnv*, jclass, jlong, jstring);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oset_comment_by_name
+ * Signature: (JLjava/lang/String;Ljava/lang/String;J)V
+ */
+ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Oset_1comment_1by_1name
+ (JNIEnv*, jclass, jlong, jstring, jstring, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oget_comment
+ * Signature: (J)Ljava/lang/String;
+ */
+ JNIEXPORT jstring JNICALL Java_hdf_hdf5lib_H5_H5Oget_1comment
+ (JNIEnv*, jclass, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oget_comment_by_name
+ * Signature: (JLjava/lang/String;J)Ljava/lang/String;
+ */
+ JNIEXPORT jstring JNICALL Java_hdf_hdf5lib_H5_H5Oget_1comment_1by_1name
+ (JNIEnv*, jclass, jlong, jstring, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oexists_by_name
+ * Signature: (JLjava/lang/String;J)Z
+ */
+ JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Oexists_1by_1name
+ (JNIEnv*, jclass, jlong, jstring, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Odecr_refcount
+ * Signature: (J)V
+ */
+ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Odecr_1refcount
+ (JNIEnv*, jclass, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Oincr_refcount
+ * Signature: (J)V
+ */
+ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Oincr_1refcount
+ (JNIEnv*, jclass, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Oopen_by_addr
+ * Signature: (JJ)J;
+ */
+ JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Oopen_1by_1addr
+ (JNIEnv*, jclass, jlong, jlong);
+
+ /*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Oopen_by_idx
+ * Signature: (JLjava/lang/String;IIJJ)J;
+ */
+ JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Oopen_1by_1idx
+ (JNIEnv*, jclass, jlong, jstring, jint, jint, jlong, jlong);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5O */
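
As with the link header, these declarations back the object-level methods on hdf.hdf5lib.H5. A hypothetical open/inspect/close sketch (H5.H5Oopen and H5.H5Oclose are assumed to be the public wrappers over the _H5Oopen/_H5Oclose natives, and the H5O_info_t field name "type" is assumed):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.hdf5lib.structs.H5O_info_t;

    public class ObjectOpenSketch {
        public static void demo(long file_id) throws Exception {
            long obj_id = H5.H5Oopen(file_id, "/group/dataset", HDF5Constants.H5P_DEFAULT);
            try {
                /* Built by the H5Oget_info native via CALL_CONSTRUCTOR. */
                H5O_info_t info = H5.H5Oget_info(obj_id);
                System.out.println("object type code: " + info.type);   // field name assumed
            }
            finally {
                H5.H5Oclose(obj_id);
            }
        }
    }
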
diff --git a/java/src/jni/h5pImp.c b/java/src/jni/h5pImp.c
new file mode 100644
index 0000000..ab39739
--- /dev/null
+++ b/java/src/jni/h5pImp.c
@@ -0,0 +1,5340 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ *     http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5util.h"
+#include "h5pImp.h"
+
+extern JavaVM *jvm;
+extern jobject visit_callback;
+extern jobject copy_callback;
+extern jobject close_callback;
+extern jobject create_callback;
+extern jobject compare_callback;
+extern jobject get_callback;
+extern jobject set_callback;
+extern jobject delete_callback;
+
+/********************/
+/* Local Prototypes */
+/********************/
+
+static herr_t H5P_cls_create_func_cb(hid_t prop_id, void *create_data);
+static herr_t H5P_cls_copy_func_cb(hid_t new_prop_id, hid_t old_prop_id, void *copy_data);
+static herr_t H5P_cls_close_func_cb(hid_t prop_id, void *close_data);
+
+static herr_t H5P_prp_create_func_cb(const char *name, size_t size, void *value);
+static herr_t H5P_prp_copy_func_cb(const char *name, size_t size, void *value);
+static herr_t H5P_prp_close_func_cb(const char *name, size_t size, void *value);
+static int H5P_prp_compare_func_cb(void *value1, void *value2, size_t size);
+static herr_t H5P_prp_get_func_cb(hid_t prop_id, const char *name, size_t size, void *value);
+static herr_t H5P_prp_set_func_cb(hid_t prop_id, const char *name, size_t size, void *value);
+static herr_t H5P_prp_delete_func_cb(hid_t prop_id, const char *name, size_t size, void *value);
+
+static herr_t H5P_iterate_cb(hid_t prop_id, const char *name, void *op_data);
+
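+/*
+ * The static callbacks below bridge HDF5's property-list callback mechanism
+ * back into Java: each is registered with the library as the C callback and,
+ * when invoked, attaches to the cached JavaVM and dispatches to the matching
+ * callback object (create_callback, copy_callback, ...) declared extern above.
+ */
+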
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pcreate
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Pcreate(JNIEnv *env, jclass clss, jlong type)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Pcreate((hid_t)type);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Pcreate */
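+
+/*
+ * Usage sketch (assumption, not part of this file): the create/close pair is
+ * normally reached through the Java wrappers, roughly
+ *
+ *     long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ *     try {
+ *         // ... configure the property list ...
+ *     }
+ *     finally {
+ *         H5.H5Pclose(dcpl);
+ *     }
+ *
+ * The wrapper names H5.H5Pcreate/H5.H5Pclose and the HDF5Constants class are
+ * assumed from the JNI method names in this file.
+ */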
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5__1H5Pclose(JNIEnv *env, jclass clss, jlong plist)
+{
+ herr_t retVal = 0;
+
+ if (plist > 0)
+ retVal = H5Pclose((hid_t)plist);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Pclose */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_class
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1class(JNIEnv *env, jclass clss, jlong plist)
+{
+ hid_t retVal = -1;
+
+    retVal = H5Pget_class((hid_t)plist);
+ if (retVal == H5P_ROOT)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1class */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pcopy
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Pcopy(JNIEnv *env, jclass clss, jlong plist)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Pcopy((hid_t)plist);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Pcopy */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_version
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1version(JNIEnv *env, jclass clss, jlong plist, jintArray version_info)
+{
+ herr_t status = -1;
+ jint *theArray;
+ jboolean isCopy;
+
+ if (version_info == NULL) {
+ h5nullArgument(env, "H5Pget_version: version_info input array is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR version_info) < 4) {
+ h5badArgument(env, "H5Pget_version: version_info input array < 4");
+ } /* end else if */
+ else {
+ theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR version_info, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_version: version_info not pinned");
+ } /* end if */
+ else {
+ status = H5Pget_version((hid_t)plist, (unsigned *)&(theArray[0]),
+ (unsigned *)&(theArray[1]), (unsigned *)&(theArray[2]), (unsigned *)&(theArray[3]));
+ if (status < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR version_info, theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseIntArrayElements(ENVPAR version_info, theArray, 0);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1version */
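+
+/*
+ * Note on the pattern used by the H5Pget_* wrappers in this file: the Java
+ * caller supplies a pre-sized array for each output parameter; the elements
+ * are pinned with Get<Type>ArrayElements, filled by the library call, and
+ * released with mode 0 (copy back) on success or JNI_ABORT (discard) on
+ * failure.  A hedged Java-side sketch for the function above:
+ *
+ *     int[] vers = new int[4];
+ *     H5.H5Pget_version(fcpl_id, vers);   // wrapper and variable names assumed
+ */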
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_userblock
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1userblock(JNIEnv *env, jclass clss, jlong plist, jlong size)
+{
+ herr_t retVal = -1;
+ long sz = (long)size;
+
+ retVal = H5Pset_userblock((hid_t)plist, (hsize_t)sz);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1userblock */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_userblock
+ * Signature: (J[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1userblock(JNIEnv *env, jclass clss, jlong plist, jlongArray size)
+{
+ herr_t status = -1;
+ jlong *theArray;
+ jboolean isCopy;
+ hsize_t s;
+
+ if (size == NULL) {
+ h5nullArgument(env, "H5Pget_userblock: size is NULL");
+ } /* end if */
+ else {
+ theArray = (jlong*)ENVPTR->GetLongArrayElements(ENVPAR size, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_userblock: size not pinned");
+ } /* end if */
+ else {
+ status = H5Pget_userblock((hid_t)plist, &s);
+
+ if (status < 0) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR size, theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ theArray[0] = (jlong)s;
+ ENVPTR->ReleaseLongArrayElements(ENVPAR size, theArray, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1userblock */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_sizes
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1sizes(JNIEnv *env, jclass clss, jlong plist, jint sizeof_addr, jint sizeof_size)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_sizes((hid_t)plist, (size_t)sizeof_addr, (size_t)sizeof_size);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1sizes */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_sizes
+ * Signature: (J[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1sizes(JNIEnv *env, jclass clss, jlong plist, jlongArray size)
+{
+ herr_t status = -1;
+ jlong *theArray;
+ jboolean isCopy;
+ size_t ss;
+ size_t sa;
+
+ if (size == NULL) {
+ h5nullArgument(env, "H5Pget_sizes: size is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR size) < 2) {
+ h5badArgument(env, "H5Pget_sizes: size input array < 2 elements");
+ }
+ else {
+ theArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR size, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_sizes: size not pinned");
+ } /* end if */
+ else {
+ status = H5Pget_sizes((hid_t)plist, &sa, &ss);
+ if (status < 0) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR size, theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ theArray[0] = (jlong)sa;
+ theArray[1] = (jlong)ss;
+ ENVPTR->ReleaseLongArrayElements(ENVPAR size, theArray, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1sizes */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_sym_k
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1sym_1k(JNIEnv *env, jclass clss, jlong plist, jint ik, jint lk)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_sym_k((hid_t)plist, (unsigned)ik, (unsigned)lk);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1sym_1k */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_sym_k
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1sym_1k(JNIEnv *env, jclass clss, jlong plist, jintArray size)
+{
+ herr_t status = -1;
+ jint *theArray;
+ jboolean isCopy;
+
+ if (size == NULL) {
+ h5nullArgument(env, "H5Pget_sym_k: size is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR size) < 2) {
+ h5badArgument(env, "H5Pget_sym_k: size < 2 elements");
+ } /* end else if */
+ else {
+ theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR size, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_sym_k: size not pinned");
+ } /* end if */
+ else {
+ status = H5Pget_sym_k((hid_t)plist, (unsigned *)&(theArray[0]), (unsigned *)&(theArray[1]));
+ if (status < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR size, theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseIntArrayElements(ENVPAR size, theArray, 0);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1sym_1k */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_istore_k
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1istore_1k(JNIEnv *env, jclass clss, jlong plist, jint ik)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_istore_k((hid_t)plist, (unsigned)ik);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1istore_1k */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_istore_k
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1istore_1k(JNIEnv *env, jclass clss, jlong plist, jintArray ik)
+{
+ herr_t status = -1;
+ jint *theArray;
+ jboolean isCopy;
+
+ if (ik == NULL) {
+ h5nullArgument(env, "H5Pget_store_k: ik is NULL");
+ } /* end if */
+ else {
+ theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR ik, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_store_k: size not pinned");
+ } /* end if */
+ else {
+ status = H5Pget_istore_k((hid_t)plist, (unsigned *)&(theArray[0]));
+ if (status < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR ik, theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseIntArrayElements(ENVPAR ik, theArray, 0);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1istore_1k */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_layout
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1layout(JNIEnv *env, jclass clss, jlong plist, jint layout)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_layout((hid_t)plist, (H5D_layout_t)layout);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1layout */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_layout
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1layout(JNIEnv *env, jclass clss, jlong plist)
+{
+ H5D_layout_t retVal = H5D_LAYOUT_ERROR;
+
+ retVal = H5Pget_layout((hid_t)plist);
+ if (retVal == H5D_LAYOUT_ERROR)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1layout */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_chunk
+ * Signature: (JI[B)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1chunk(JNIEnv *env, jclass clss, jlong plist, jint ndims, jbyteArray dim)
+{
+ herr_t status = -1;
+ jbyte *theArray;
+ jboolean isCopy;
+ hsize_t *da;
+ hsize_t *lp;
+ jlong *jlp;
+ size_t i;
+ size_t rank;
+
+ if (dim == NULL) {
+ h5nullArgument(env, "H5Pset_chunk: dim array is NULL");
+ } /* end if */
+ else {
+ i = (size_t)ENVPTR->GetArrayLength(ENVPAR dim);
+ rank = i / sizeof(jlong);
+ if (rank < ndims) {
+ h5badArgument(env, "H5Pset_chunk: dims array < ndims");
+ } /* end if */
+ else {
+ theArray = (jbyte *)ENVPTR->GetByteArrayElements(ENVPAR dim, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pset_chunk: dim array not pinned");
+ } /* end if */
+ else {
+ da = lp = (hsize_t *)HDmalloc(rank * sizeof(hsize_t));
+ if (da == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR dim, theArray, JNI_ABORT);
+ h5JNIFatalError(env, "H5Pset_chunk: dims not converted to hsize_t");
+ } /* end if */
+ else {
+ jlp = (jlong *)theArray;
+ for (i = 0; i < rank; i++) {
+ *lp = (hsize_t)*jlp;
+ lp++;
+ jlp++;
+                    } /* end for */
+
+ status = H5Pset_chunk((hid_t)plist, (int)ndims, da);
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR dim, theArray, JNI_ABORT);
+ HDfree(da);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1chunk */
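+
+/*
+ * The dim argument above is a byte array holding ndims packed jlong values;
+ * each is converted element-by-element to hsize_t before calling H5Pset_chunk.
+ * Hedged Java-side sketch (wrapper names assumed; the public wrapper is
+ * expected to do the long[]-to-byte[] packing):
+ *
+ *     long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ *     H5.H5Pset_chunk(dcpl, 2, new long[] { 64, 64 });
+ */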
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_chunk
+ * Signature: (JI[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1chunk(JNIEnv *env, jclass clss, jlong plist, jint max_ndims, jlongArray dims)
+{
+ herr_t status = -1;
+ jlong *theArray;
+ jboolean isCopy;
+ hsize_t *da;
+ int i;
+
+ if (dims == NULL) {
+ h5nullArgument(env, "H5Pget_chunk: dims is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR dims) < max_ndims) {
+ h5badArgument(env, "H5Pget_chunk: dims array < max_ndims");
+ } /* end else if */
+ else {
+ theArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR dims, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_chunk: input dims not pinned");
+ } /* end if */
+ else {
+ da = (hsize_t *)HDmalloc((size_t)max_ndims * sizeof(hsize_t));
+ if (da == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, theArray, JNI_ABORT);
+ h5JNIFatalError(env, "H5Pget_chunk: dims not converted to hsize_t");
+ } /* end if */
+ else {
+ status = H5Pget_chunk((hid_t)plist, (int)max_ndims, da);
+
+ if (status < 0) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, theArray, JNI_ABORT);
+ HDfree (da);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ for (i = 0; i < max_ndims; i++) {
+ theArray[i] = (jlong)da[i];
+ }
+ HDfree (da);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, theArray, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1chunk */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_alignment
+ * Signature: (JJJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1alignment(JNIEnv *env, jclass clss, jlong plist, jlong threshold, jlong alignment)
+{
+ herr_t retVal = -1;
+ long thr = (long)threshold;
+ long align = (long)alignment;
+
+ retVal = H5Pset_alignment((hid_t)plist, (hsize_t)thr, (hsize_t)align);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1alignment */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_alignment
+ * Signature: (J[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1alignment(JNIEnv *env, jclass clss, jlong plist, jlongArray alignment)
+{
+ herr_t status = -1;
+ jlong *theArray;
+ jboolean isCopy;
+ hsize_t t;
+ hsize_t a;
+
+ if (alignment == NULL) {
+ h5nullArgument(env, "H5Pget_alignment: input alignment is NULL");
+ } /* end if */
+ else {
+ if (ENVPTR->GetArrayLength(ENVPAR alignment) < 2) {
+ h5badArgument(env, "H5Pget_alignment: alignment input array < 2");
+ } /* end if */
+ else {
+ theArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR alignment, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_alignment: input array not pinned");
+ } /* end if */
+ else {
+ status = H5Pget_alignment((hid_t)plist, &t, &a);
+ if (status < 0) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR alignment, theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ theArray[0] = (jlong)t;
+ theArray[1] = (jlong)a;
+ ENVPTR->ReleaseLongArrayElements(ENVPAR alignment, theArray, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1alignment */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_external
+ * Signature: (JLjava/lang/String;JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1external(JNIEnv *env, jclass clss, jlong plist, jstring name, jlong offset, jlong size)
+{
+ herr_t status = -1;
+ const char *fileName;
+ off_t off;
+ hsize_t sz;
+ hid_t plid;
+
+ plid = (hid_t)plist;
+ off = (off_t)offset;
+ sz = (hsize_t)size;
+
+ PIN_JAVA_STRING(name, fileName, -1);
+
+ status = H5Pset_external(plid, fileName, off, sz);
+
+ UNPIN_JAVA_STRING(name, fileName);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1external */
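+
+/*
+ * Hedged usage sketch for external raw-data storage (wrapper and constant
+ * names assumed from the JNI names in this file):
+ *
+ *     long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ *     H5.H5Pset_external(dcpl, "raw.data", 0, HDF5Constants.H5F_UNLIMITED);
+ */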
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_external_count
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1external_1count(JNIEnv *env, jclass clss, jlong plist)
+{
+ int retVal = -1;
+
+ retVal = H5Pget_external_count((hid_t)plist);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1external_1count */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_external
+ * Signature: (JIJ[Ljava/lang/String;[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1external(JNIEnv *env, jclass clss, jlong plist, jint idx, jlong name_size,
+ jobjectArray name, jlongArray size)
+{
+ herr_t status = -1;
+ jlong *theArray;
+ jboolean isCopy;
+ char *file;
+ jstring str;
+ off_t o;
+ hsize_t s;
+
+ if (name_size < 0) {
+ h5badArgument(env, "H5Pget_external: name_size < 0");
+ return -1;
+ } /* end if */
+ else if (name_size == 0) {
+ file = NULL;
+ } /* end else if */
+ else {
+ file = (char *)HDmalloc(sizeof(char)*(size_t)name_size);
+ } /* end else */
+
+ if (size != NULL) {
+ if (ENVPTR->GetArrayLength(ENVPAR size) < 2) {
+ HDfree(file);
+ h5badArgument(env, "H5Pget_external: size input array < 2");
+ return -1;
+ } /* end if */
+ theArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR size, &isCopy);
+ if (theArray == NULL) {
+ HDfree(file);
+ h5JNIFatalError( env, "H5Pget_external: size array not pinned");
+ return -1;
+ } /* end if */
+ } /* end if */
+
+ status = H5Pget_external((hid_t) plist, (unsigned)idx, (size_t)name_size,
+ file, (off_t *)&o, (hsize_t *)&s);
+ if (status < 0) {
+ if (size != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR size, theArray, JNI_ABORT);
+ } /* end if */
+ HDfree(file);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ if (size != NULL) {
+ theArray[0] = o;
+ theArray[1] = (jlong)s;
+ ENVPTR->ReleaseLongArrayElements(ENVPAR size, theArray, 0);
+ } /* end if */
+
+ if (file != NULL) {
+ /* NewStringUTF may throw OutOfMemoryError */
+ str = ENVPTR->NewStringUTF(ENVPAR file);
+ if (str == NULL) {
+ HDfree(file);
+ h5JNIFatalError(env, "H5Pget_external: return array not created");
+ } /* end if */
+ else {
+ /* SetObjectArrayElement may raise exceptions */
+ ENVPTR->SetObjectArrayElement(ENVPAR name, 0, (jobject)str);
+ HDfree(file);
+ } /* end else */
+ } /* end if */
+ } /* end else */
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1external */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fill_value
+ * Signature: (JJ[B)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1fill_1value(JNIEnv *env, jclass clss, jlong plist_id, jlong type_id, jbyteArray value)
+{
+ jint status = -1;
+    jbyte *byteP = NULL;
+ jboolean isCopy;
+
+ if (value != NULL) {
+ byteP = ENVPTR->GetByteArrayElements(ENVPAR value, &isCopy);
+ if (byteP == NULL) {
+ h5JNIFatalError(env, "H5Pget_fill_value: value array not pinned");
+ return -1;
+ } /* end if */
+ } /* end if */
+
+ status = H5Pset_fill_value((hid_t)plist_id, (hid_t)type_id, byteP);
+
+ if (value != NULL)
+ ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, JNI_ABORT);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return status;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1fill_1value */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_fill_value
+ * Signature: (JJ[B)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1fill_1value(JNIEnv *env, jclass clss, jlong plist_id, jlong type_id, jbyteArray value)
+{
+ jint status = -1;
+ jbyte *byteP;
+ jboolean isCopy;
+
+ if (value == NULL) {
+ h5badArgument(env, "H5Pget_fill_value: value is NULL");
+ } /* end if */
+ else {
+ byteP = ENVPTR->GetByteArrayElements(ENVPAR value, &isCopy);
+ if (byteP == NULL) {
+ h5JNIFatalError(env, "H5Pget_fill_value: value array not pinned");
+ } /* end if */
+ else {
+ status = H5Pget_fill_value((hid_t)plist_id, (hid_t)type_id, byteP);
+ if (status < 0) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, 0);
+ } /* end else */
+ } /* end else */
+
+ return status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1fill_1value */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_filter
+ * Signature: (JIIJ[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1filter(JNIEnv *env, jclass clss, jlong plist, jint filter, jint flags,
+ jlong cd_nelmts, jintArray cd_values)
+{
+ herr_t status = -1;
+ jint *theArray;
+ jboolean isCopy;
+
+ if (cd_values == NULL) {
+ status = H5Pset_filter((hid_t)plist, (H5Z_filter_t)filter,
+ (unsigned int)flags, (size_t)cd_nelmts, NULL);
+ if (status < 0)
+ h5libraryError(env);
+ } /* end if */
+ else {
+ theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR cd_values, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pset_filter: input array not pinned");
+ }/* end if */
+ else {
+ status = H5Pset_filter((hid_t)plist, (H5Z_filter_t)filter,
+ (unsigned int)flags, (size_t)cd_nelmts, (const unsigned int *)theArray);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, theArray, JNI_ABORT);
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1filter */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_nfilters
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1nfilters(JNIEnv *env, jclass clss, jlong plist)
+{
+ int retVal = -1;
+
+ retVal = H5Pget_nfilters((hid_t)plist);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1nfilters */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_filter
+ * Signature: (JI[I[J[IJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1filter(JNIEnv *env, jclass clss, jlong plist, jint filter_number, jintArray flags,
+ jlongArray cd_nelmts, jintArray cd_values, jlong namelen, jobjectArray name)
+{
+ herr_t status = -1;
+ jint *flagsArray;
+ jlong *cd_nelmtsArray;
+ jint *cd_valuesArray;
+ jboolean isCopy;
+ jstring str;
+ char *filter;
+
+ if (namelen <= 0) {
+ h5badArgument(env, "H5Pget_filter: namelen <= 0");
+ } /* end if */
+ else if (flags == NULL) {
+ h5badArgument(env, "H5Pget_filter: flags is NULL");
+ } /* end else if */
+ else if (cd_nelmts == NULL) {
+ h5badArgument(env, "H5Pget_filter: cd_nelmts is NULL");
+ } /* end else if */
+ else if (cd_values == NULL) {
+ h5badArgument(env, "H5Pget_filter: cd_values is NULL");
+ } /* end else if */
+ else {
+ filter = (char *)HDmalloc(sizeof(char)*(size_t)namelen);
+ if (filter == NULL) {
+ h5outOfMemory(env, "H5Pget_filter: namelent malloc failed");
+ return -1;
+ } /* end if */
+ flagsArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR flags, &isCopy);
+ if (flagsArray == NULL) {
+ HDfree(filter);
+ h5JNIFatalError(env, "H5Pget_filter: flags array not pinned");
+ return -1;
+ } /* end if */
+ cd_nelmtsArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR cd_nelmts, &isCopy);
+ if (cd_nelmtsArray == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ HDfree(filter);
+ h5JNIFatalError(env, "H5Pget_filter: nelmts array not pinned");
+ return -1;
+ } /* end if */
+ cd_valuesArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR cd_values, &isCopy);
+ if (cd_valuesArray == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ HDfree(filter);
+ h5JNIFatalError(env, "H5Pget_filter: elmts array not pinned");
+ return -1;
+ } /* end if */
+
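+        /*
+         * size_t may be narrower than jlong on 32-bit platforms, so the jlong
+         * element cannot be handed to the library as a size_t* directly; a
+         * local size_t temporary is used and copied back afterwards.  The same
+         * "direct cast" workaround appears in several wrappers below.
+         */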
+ { /* direct cast (size_t *)variable fails on 32-bit environment */
+ long long cd_nelmts_temp = *(cd_nelmtsArray);
+ size_t cd_nelmts_t = (size_t)cd_nelmts_temp;
+ unsigned int filter_config;
+ status = H5Pget_filter2((hid_t)plist, (unsigned)filter_number,
+ (unsigned int *)flagsArray, &cd_nelmts_t, (unsigned int *)cd_valuesArray,
+ (size_t)namelen, filter, &filter_config);
+
+ *cd_nelmtsArray = (jlong)cd_nelmts_t;
+ } /* end direct cast special */
+
+ if (status < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ HDfree(filter);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, 0);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, 0);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, 0);
+
+ /* NewStringUTF may throw OutOfMemoryError */
+ str = ENVPTR->NewStringUTF(ENVPAR filter);
+ HDfree(filter);
+ if (str == NULL) {
+ h5JNIFatalError(env, "H5Pget_filter: return string not pinned");
+ } /* end if */
+ else {
+                /* SetObjectArrayElement may throw exceptions */
+ ENVPTR->SetObjectArrayElement(ENVPAR name, 0, (jobject)str);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1filter */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_driver
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1driver(JNIEnv *env, jclass clss, jlong plist)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Pget_driver((hid_t) plist);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1driver */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_cache
+ * Signature: (JIJJD)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1cache(JNIEnv *env, jclass clss, jlong plist, jint mdc_nelmts, jlong rdcc_nelmts,
+ jlong rdcc_nbytes, jdouble rdcc_w0)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_cache((hid_t)plist, (int)mdc_nelmts, (size_t)rdcc_nelmts,
+ (size_t)rdcc_nbytes, (double) rdcc_w0);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1cache */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_cache
+ * Signature: (J[I[J[J[D)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1cache(JNIEnv *env, jclass clss, jlong plist, jintArray mdc_nelmts,
+ jlongArray rdcc_nelmts, jlongArray rdcc_nbytes, jdoubleArray rdcc_w0)
+{
+ herr_t status = -1;
+ jint mode;
+ jdouble *w0Array;
+ jlong *rdcc_nelmtsArray;
+ jlong *nbytesArray;
+ jboolean isCopy;
+
+ if (rdcc_w0 == NULL) {
+ w0Array = (jdouble *)NULL;
+ } /* end if */
+ else {
+ w0Array = (jdouble *)ENVPTR->GetDoubleArrayElements(ENVPAR rdcc_w0, &isCopy);
+ if (w0Array == NULL) {
+ h5JNIFatalError(env, "H5Pget_cache: w0_array array not pinned");
+ return -1;
+ } /* end if */
+ } /* end else */
+
+ if (rdcc_nelmts == NULL) {
+ rdcc_nelmtsArray = (jlong *) NULL;
+ } /* end if */
+ else {
+ rdcc_nelmtsArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR rdcc_nelmts, &isCopy);
+ if (rdcc_nelmtsArray == NULL) {
+ /* exception -- out of memory */
+ if (w0Array != NULL) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR rdcc_w0, w0Array, JNI_ABORT);
+ }
+ h5JNIFatalError(env, "H5Pget_cache: rdcc_nelmts array not pinned");
+ return -1;
+ } /* end if */
+ } /* end else */
+
+ if (rdcc_nbytes == NULL) {
+ nbytesArray = (jlong *) NULL;
+ } /* end if */
+ else {
+ nbytesArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR rdcc_nbytes, &isCopy);
+ if (nbytesArray == NULL) {
+ if (w0Array != NULL) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR rdcc_w0, w0Array, JNI_ABORT);
+ } /* end if */
+ if (rdcc_nelmtsArray != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR rdcc_nelmts, rdcc_nelmtsArray, JNI_ABORT);
+ } /* end if */
+ h5JNIFatalError(env, "H5Pget_cache: nbytesArray array not pinned");
+ return -1;
+ } /* end if */
+ } /* end else */
+
+    { /* direct cast (size_t *)variable fails on 32-bit environment */
+        /* tolerate null output arrays from the Java caller */
+        long long rdcc_nelmts_temp = (rdcc_nelmtsArray == NULL) ? 0 : *rdcc_nelmtsArray;
+        size_t rdcc_nelmts_t = (size_t)rdcc_nelmts_temp;
+        long long nbytes_temp = (nbytesArray == NULL) ? 0 : *nbytesArray;
+        size_t nbytes_t = (size_t)nbytes_temp;
+
+        status = H5Pget_cache((hid_t)plist, (int *)NULL, &rdcc_nelmts_t,
+                &nbytes_t, (double *)w0Array);
+
+        if (rdcc_nelmtsArray != NULL)
+            *rdcc_nelmtsArray = (jlong)rdcc_nelmts_t;
+        if (nbytesArray != NULL)
+            *nbytesArray = (jlong)nbytes_t;
+    } /* end direct cast special */
+
+
+ if (status < 0) {
+ mode = JNI_ABORT;
+ } /* end if */
+ else {
+ mode = 0; /* commit and free */
+ } /* end else */
+
+ if (rdcc_nelmtsArray != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR rdcc_nelmts, rdcc_nelmtsArray, mode);
+ } /* end if */
+
+ if (nbytesArray != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR rdcc_nbytes, nbytesArray, mode);
+ } /* end if */
+
+ if (w0Array != NULL) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR rdcc_w0, w0Array, mode);
+ } /* end if */
+
+ if (status < 0) {
+ h5libraryError(env);
+ } /* end if */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1cache */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_buffer
+ * Signature: (JJ[B[B)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1buffer(JNIEnv *env, jclass clss, jlong plist, jlong size, jbyteArray tconv, jbyteArray bkg)
+{
+ h5unimplemented(env, "H5Pset_buffer: not implemented");
+ return -1;
+#ifdef notdef
+
+/* DON'T IMPLEMENT THIS!!! */
+ jint status = -1;
+ jbyte *tconvP;
+ jbyte *bkgP;
+ jboolean isCopy;
+
+ if (tconv == NULL)
+ tconvP = (jbyte *)NULL;
+ else {
+ tconvP = ENVPTR->GetByteArrayElements(ENVPAR tconv, &isCopy);
+ if (tconvP == NULL) {
+ h5JNIFatalError(env, "H5Pset_buffer: tconv not pinned");
+ return -1;
+ }
+ }
+ if (bkg == NULL)
+ bkgP = (jbyte *)NULL;
+ else {
+ bkgP = ENVPTR->GetByteArrayElements(ENVPAR bkg, &isCopy);
+ if (bkgP == NULL) {
+ h5JNIFatalError(env, "H5Pset_buffer: bkg not pinned");
+ return -1;
+ }
+ }
+
+ status = H5Pset_buffer((hid_t)plist, (size_t)size, tconvP, bkgP);
+ if (status < 0) {
+ if (tconv != NULL)
+ ENVPTR->ReleaseByteArrayElements(ENVPAR tconv, tconvP, JNI_ABORT);
+ if (bkg != NULL)
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bkg, bkgP, JNI_ABORT);
+ h5libraryError(env);
+ return -1;
+ }
+
+ if (tconv != NULL)
+ ENVPTR->ReleaseByteArrayElements(ENVPAR tconv, tconvP, 0);
+ if (bkg != NULL)
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bkg, bkgP, 0);
+
+ return status;
+#endif
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1buffer */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_buffer
+ * Signature: (J[B[B)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1buffer(JNIEnv *env, jclass clss, jlong plist, jbyteArray tconv, jbyteArray bkg)
+{
+ h5unimplemented(env, "H5Pget_buffer: not implemented");
+ return -1;
+#ifdef notdef
+
+/* DON'T IMPLEMENT THIS!!! */
+ jlong status = -1;
+ jbyte *tconvP;
+ jbyte *bkgP;
+ jboolean isCopy;
+
+ if (tconv == NULL) {
+ h5nullArgument(env, "H5Pget_buffer: tconv input array is NULL");
+ return -1;
+ }
+ tconvP = ENVPTR->GetByteArrayElements(ENVPAR tconv, &isCopy);
+ if (tconvP == NULL) {
+ h5JNIFatalError(env, "H5Pget_buffer: tconv not pinned");
+ return -1;
+ }
+ if (bkg == NULL) {
+ h5nullArgument(env, "H5Pget_buffer: bkg array is NULL");
+ return -1;
+ }
+ bkgP = ENVPTR->GetByteArrayElements(ENVPAR bkg, &isCopy);
+ if (bkgP == NULL) {
+ h5JNIFatalError(env, "H5Pget_buffer: bkg not pinned");
+ return -1;
+ }
+
+ status = H5Pget_buffer((hid_t)plist, tconvP, bkgP);
+ if (status < 0) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR tconv, tconvP, JNI_ABORT);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bkg, bkgP, JNI_ABORT);
+ h5libraryError(env);
+ return -1;
+ }
+ ENVPTR->ReleaseByteArrayElements(ENVPAR tconv, tconvP, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bkg, bkgP, 0);
+
+ return status;
+#endif
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1buffer */
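+
+/*
+ * H5Pset_buffer/H5Pget_buffer with caller-supplied type-conversion and
+ * background buffers are deliberately left unimplemented here (see the
+ * disabled code above); only the size-only variants below are exposed, and
+ * they pass NULL buffers to the library.
+ */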
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_buffer_size
+ * Signature: (JJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1buffer_1size(JNIEnv *env, jclass clss, jlong plist, jlong size)
+{
+ if (H5Pset_buffer((hid_t)plist, (size_t)size, NULL, NULL) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1buffer_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_buffer_size
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1buffer_1size(JNIEnv *env, jclass clss, jlong plist)
+{
+ size_t size = 0;
+
+ size = H5Pget_buffer((hid_t)plist, NULL, NULL);
+ if (size == 0)
+ h5libraryError(env);
+
+ return (jlong)size;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1buffer_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_preserve
+ * Signature: (JZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1preserve(JNIEnv *env, jclass clss, jlong plist, jboolean status)
+{
+ hbool_t st;
+ herr_t retVal = -1;
+
+ if (status == JNI_TRUE) {
+ st = TRUE;
+ } /* end if */
+ else if (status == JNI_FALSE) {
+        st = FALSE;
+ } /* end else if */
+ else {
+ h5badArgument(env, "H5Pset_preserve: status not TRUE or FALSE");
+ return -1;
+ } /* end else */
+
+ retVal = H5Pset_preserve((hid_t)plist, st);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1preserve */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_preserve
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1preserve(JNIEnv *env, jclass clss, jlong plist)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pget_preserve((hid_t)plist);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1preserve */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_deflate
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1deflate(JNIEnv *env, jclass clss, jlong plist, jint level)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_deflate((hid_t)plist, (unsigned)level);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1deflate */
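+
+/*
+ * Hedged usage sketch: deflate is normally combined with a chunked layout on
+ * a dataset creation property list (Java wrapper names assumed from the JNI
+ * names in this file):
+ *
+ *     long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ *     H5.H5Pset_deflate(dcpl, 9);                      // gzip level 0-9
+ *     H5.H5Pset_chunk(dcpl, 2, new long[] { 32, 64 });
+ */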
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_gc_references
+ * Signature: (JZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1gc_1references(JNIEnv *env, jclass clss, jlong fapl_id, jboolean gc_ref)
+{
+ herr_t retVal = -1;
+ unsigned gc_ref_val;
+
+ if (gc_ref == JNI_TRUE)
+ gc_ref_val = 1;
+ else
+ gc_ref_val = 0;
+
+ retVal = H5Pset_gc_references((hid_t)fapl_id, gc_ref_val);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1gc_1references */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_gc_references
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1gc_1references(JNIEnv *env, jclass clss, jlong fapl_id)
+{
+ unsigned gc_ref_val = 0;
+ jboolean bval = JNI_FALSE;
+
+ if (H5Pget_gc_references((hid_t)fapl_id, (unsigned *)&gc_ref_val) < 0) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+ if (gc_ref_val == 1)
+ bval = JNI_TRUE;
+ } /* end else */
+
+ return bval;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1gc_1references */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_btree_ratios
+ * Signature: (JDDD)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1btree_1ratios(JNIEnv *env, jclass clss, jlong plist_id, jdouble left, jdouble middle, jdouble right)
+{
+ herr_t status = -1;
+
+ status = H5Pset_btree_ratios((hid_t)plist_id, (double)left,(double)middle, (double)right);
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1btree_1ratios */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_btree_ratios
+ * Signature: (J[D[D[D)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1btree_1ratios(JNIEnv *env, jclass clss, jlong plist_id, jdoubleArray left,
+ jdoubleArray middle, jdoubleArray right)
+{
+ herr_t status = -1;
+ jdouble *leftP;
+ jdouble *middleP;
+ jdouble *rightP;
+ jboolean isCopy;
+
+ if (left == NULL) {
+ h5nullArgument(env, "H5Pget_btree_ratios: left input array is NULL");
+ } /* end if */
+ else if (middle == NULL) {
+ h5nullArgument(env, "H5Pget_btree_ratios: middle input array is NULL");
+ } /* end else if */
+ else if (right == NULL) {
+ h5nullArgument(env, "H5Pget_btree_ratios: right input array is NULL");
+ } /* end else if */
+ else {
+ leftP = (jdouble *)ENVPTR->GetDoubleArrayElements(ENVPAR left, &isCopy);
+ if (leftP == NULL) {
+ h5JNIFatalError(env, "H5Pget_btree_ratios: left not pinned");
+ } /* end if */
+ else {
+ middleP = (jdouble *)ENVPTR->GetDoubleArrayElements(ENVPAR middle, &isCopy);
+ if (middleP == NULL) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR left, leftP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Pget_btree_ratios: middle not pinned");
+ } /* end if */
+ else {
+ rightP = (jdouble *)ENVPTR->GetDoubleArrayElements(ENVPAR right, &isCopy);
+ if (rightP == NULL) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR left, leftP, JNI_ABORT);
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR middle, middleP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Pget_btree_ratios: middle not pinned");
+ } /* end if */
+ else {
+ status = H5Pget_btree_ratios((hid_t)plist_id, (double *)leftP,
+ (double *)middleP, (double *)rightP);
+ if (status < 0) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR left, leftP, JNI_ABORT);
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR middle, middleP, JNI_ABORT);
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR right, rightP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR left, leftP, 0);
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR middle, middleP, 0);
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR right, rightP, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1btree_1ratios */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_small_data_block_size
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1small_1data_1block_1size(JNIEnv *env, jclass clss, jlong plist, jlong size)
+{
+ long sz = (long)size;
+ herr_t retVal = -1;
+
+ retVal = H5Pset_small_data_block_size((hid_t)plist, (hsize_t)sz);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1small_1data_1block_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_small_data_block_size
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1small_1data_1block_1size(JNIEnv *env, jclass clss, jlong plist)
+{
+ hsize_t s;
+
+ if (H5Pget_small_data_block_size((hid_t)plist, &s) < 0)
+ h5libraryError(env);
+
+ return (jlong)s;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1small_1data_1block_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_alloc_time
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1alloc_1time(JNIEnv *env, jclass clss, jlong plist, jint alloc_time)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_alloc_time((hid_t)plist, (H5D_alloc_time_t)alloc_time);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1alloc_1time */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_alloc_time
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1alloc_1time(JNIEnv *env, jclass clss, jlong plist, jintArray alloc_time)
+{
+ herr_t retVal = -1;
+ jint *theArray;
+ jboolean isCopy;
+ H5D_alloc_time_t time;
+
+ if (alloc_time == NULL) {
+ /* exception ? */
+ h5nullArgument(env, "H5Pget_alloc_time: alloc_time is NULL");
+ } /* end if */
+ else {
+ theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR alloc_time, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_alloc_time: alloc_time not pinned");
+ } /* end if */
+ else {
+ retVal = H5Pget_alloc_time((hid_t)plist, &time);
+ if (retVal < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR alloc_time, theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ theArray[0] = time;
+ ENVPTR->ReleaseIntArrayElements(ENVPAR alloc_time, theArray, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1alloc_1time */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fill_time
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1fill_1time(JNIEnv *env, jclass clss, jlong plist, jint fill_time)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_fill_time((hid_t)plist, (H5D_fill_time_t)fill_time);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1fill_1time */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_fill_time
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1fill_1time(JNIEnv *env, jclass clss, jlong plist, jintArray fill_time)
+{
+ herr_t retVal = -1;
+ jint *theArray;
+ jboolean isCopy;
+ H5D_fill_time_t time;
+
+ if (fill_time == NULL) {
+ /* exception ? */
+ h5nullArgument(env, "H5Pget_fill_time: fill_time is NULL");
+ } /* end if */
+ else {
+ theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR fill_time, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_fill_time: fill_time not pinned");
+ } /* end if */
+ else {
+ retVal = H5Pget_fill_time((hid_t)plist, &time);
+ if (retVal < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR fill_time, theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ theArray[0] = (jint)time;
+ ENVPTR->ReleaseIntArrayElements(ENVPAR fill_time, theArray, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1fill_1time */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pfill_value_defined
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pfill_1value_1defined(JNIEnv *env, jclass clss, jlong plist, jintArray status)
+{
+ herr_t retVal = -1;
+ jint *theArray;
+ jboolean isCopy;
+ H5D_fill_value_t value;
+
+ if (status == NULL) {
+ /* exception ? */
+ h5nullArgument(env, "H5Pfill_value_defined: status is NULL");
+ } /* end if */
+ else {
+ theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR status, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pfill_value_defined: status not pinned");
+ } /* end if */
+ else {
+ retVal = H5Pfill_value_defined((hid_t)plist, &value);
+ if (retVal < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR status, theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ theArray[0] = value;
+ ENVPTR->ReleaseIntArrayElements(ENVPAR status, theArray, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pfill_1value_1defined */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fletcher32
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1fletcher32(JNIEnv *env, jclass clss, jlong plist)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_fletcher32((hid_t)plist);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1fletcher32 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_edc_check
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1edc_1check(JNIEnv *env, jclass clss, jlong plist, jint check)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_edc_check((hid_t)plist, (H5Z_EDC_t)check);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1edc_1check */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_edc_check
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1edc_1check(JNIEnv *env, jclass clss, jlong plist)
+{
+ H5Z_EDC_t retVal = -1;
+
+ retVal = H5Pget_edc_check((hid_t)plist);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1edc_1check */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_shuffle
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1shuffle(JNIEnv *env, jclass clss, jlong plist)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_shuffle((hid_t)plist);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1shuffle */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_szip
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1szip(JNIEnv *env, jclass clss, jlong plist, jint options_mask, jint pixels_per_block)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_szip((hid_t)plist, (unsigned int)options_mask, (unsigned int)pixels_per_block);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1szip */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_hyper_vector_size
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1hyper_1vector_1size(JNIEnv *env, jclass clss, jlong plist, jlong vector_size)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_hyper_vector_size((hid_t)plist, (size_t)vector_size);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1hyper_1vector_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_hyper_vector_size
+ * Signature: (J[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1hyper_1vector_1size(JNIEnv *env, jclass clss, jlong plist, jlongArray vector_size)
+{
+ herr_t retVal = -1;
+ jlong *theArray;
+ size_t size;
+ jboolean isCopy;
+
+ if (vector_size == NULL) {
+ /* exception ? */
+ h5nullArgument(env, "H5Pget_hyper_vector_size: vector_size is NULL");
+ } /* end if */
+ else {
+ theArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR vector_size, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_hyper_vector_size: vector_size not pinned");
+ } /* end if */
+ else {
+ retVal = H5Pget_hyper_vector_size((hid_t)plist, &size);
+ if (retVal < 0) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR vector_size, theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ theArray[0] = (jlong)size;
+ ENVPTR->ReleaseLongArrayElements(ENVPAR vector_size, theArray, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1hyper_1vector_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pall_filters_avail
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Pall_1filters_1avail(JNIEnv *env, jclass clss, jlong dcpl_id)
+{
+ htri_t bval = JNI_FALSE;
+
+ bval = H5Pall_filters_avail((hid_t)dcpl_id);
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Pall_1filters_1avail */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pmodify_filter
+ * Signature: (JIIJ[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pmodify_1filter(JNIEnv *env, jclass clss, jlong plist, jint filter,
+ jint flags, jlong cd_nelmts, jintArray cd_values)
+{
+ herr_t status = -1;
+ jint *cd_valuesP;
+ jboolean isCopy;
+
+ if (cd_values == NULL) {
+ h5nullArgument(env, "H5Pmodify_filter: cd_values is NULL");
+ } /* end if */
+ else {
+ cd_valuesP = ENVPTR->GetIntArrayElements(ENVPAR cd_values,&isCopy);
+ if (cd_valuesP == NULL) {
+ h5JNIFatalError(env, "H5Pmodify_filter: cd_values not pinned");
+ } /* end if */
+ else {
+ status = H5Pmodify_filter((hid_t)plist, (H5Z_filter_t)filter,(const unsigned int)flags,
+ (size_t)cd_nelmts, (unsigned int *)cd_valuesP);
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesP, JNI_ABORT);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pmodify_1filter */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_filter_by_id
+ * Signature: (JI[I[J[IJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1filter_1by_1id(JNIEnv *env, jclass clss, jlong plist, jint filter,
+ jintArray flags, jlongArray cd_nelmts, jintArray cd_values, jlong namelen, jobjectArray name)
+{
+ jboolean isCopy;
+ herr_t status = -1;
+ jint *cd_valuesArray;
+ jint *flagsArray;
+ jlong *cd_nelmtsArray;
+ jstring str;
+ char *aName;
+ int i = 0;
+ int rank;
+ long bs;
+
+ bs = (long)namelen;
+ if (bs <= 0) {
+ h5badArgument(env, "H5Pget_filter_by_id: namelen <= 0");
+ } /* end if */
+ else if (flags == NULL) {
+ h5nullArgument(env, "H5Pget_filter_by_id: flags is NULL");
+ } /* end else if */
+ else if (cd_nelmts == NULL) {
+ h5nullArgument(env, "H5Pget_filter_by_id: cd_nelms is NULL");
+ } /* end else if */
+ else if (cd_values == NULL) {
+ h5nullArgument(env, "H5Pget_filter_by_id: cd_values is NULL");
+ } /* end else if */
+ else if (name == NULL) {
+ h5nullArgument(env, "H5Pget_filter_by_id: name is NULL");
+ } /* end else if */
+ else {
+ aName = (char*)HDmalloc(sizeof(char) * (size_t)bs);
+ if (aName == NULL) {
+ h5outOfMemory(env, "H5Pget_filter_by_id: malloc failed");
+ return -1;
+ } /* end if */
+
+ flagsArray = ENVPTR->GetIntArrayElements(ENVPAR flags, &isCopy);
+ if (flagsArray == NULL) {
+ HDfree(aName);
+ h5JNIFatalError(env, "H5Pget_filter_by_id: flags not pinned");
+ return -1;
+ } /* end if */
+
+ cd_nelmtsArray = ENVPTR->GetLongArrayElements(ENVPAR cd_nelmts, &isCopy);
+ if (cd_nelmtsArray == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ HDfree(aName);
+ h5JNIFatalError(env, "H5Pget_filter_by_id: cd_nelms not pinned");
+ return -1;
+ } /* end if */
+
+ cd_valuesArray = ENVPTR->GetIntArrayElements(ENVPAR cd_values, &isCopy);
+ rank = ENVPTR->GetArrayLength(ENVPAR cd_values);
+ if (cd_valuesArray == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+ HDfree(aName);
+ h5JNIFatalError(env, "H5Pget_filter_by_id: cd_values array not converted to unsigned int.");
+ return -1;
+ } /* end if */
+
+ { /* direct cast (size_t *)variable fails on 32-bit environment */
+ long long cd_nelmts_temp = *(cd_nelmtsArray);
+ size_t cd_nelmts_t = (size_t)cd_nelmts_temp;
+ unsigned int filter_config;
+
+ status = H5Pget_filter_by_id2( (hid_t)plist, (H5Z_filter_t)filter,
+ (unsigned int *)flagsArray, &cd_nelmts_t, (unsigned int *)cd_valuesArray,
+ (size_t)namelen, (char *)aName, &filter_config);
+
+ *cd_nelmtsArray = (jlong)cd_nelmts_t;
+ } /* end direct cast special */
+
+ if (status < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, JNI_ABORT);
+ HDfree(aName);
+ h5libraryError(env);
+ } /* end if */
+        else {
+            str = ENVPTR->NewStringUTF(ENVPAR aName);
+            ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, 0);
+            ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, 0);
+            ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, 0);
+
+            HDfree(aName);
+
+            /* return the filter name to the caller through the name array */
+            if (str != NULL)
+                ENVPTR->SetObjectArrayElement(ENVPAR name, 0, (jobject)str);
+        } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1filter_1by_1id */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fclose_degree
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1fclose_1degree(JNIEnv *env, jclass clss, jlong plist, jint fc_degree)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_fclose_degree((hid_t)plist, (H5F_close_degree_t)fc_degree);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1fclose_1degree */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_fclose_degree
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1fclose_1degree(JNIEnv *env, jclass clss, jlong plist)
+{
+ H5F_close_degree_t degree;
+
+ if (H5Pget_fclose_degree((hid_t)plist, &degree) < 0)
+ h5libraryError(env);
+
+ return (jint)degree;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1fclose_1degree */
+
+
+/**********************************************************************
+ * *
+ * File access properties *
+ * *
+ **********************************************************************/
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_family
+ * Signature: (JJJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1fapl_1family(JNIEnv *env, jclass clss, jlong plist, jlong memb_size, jlong memb_plist)
+{
+ long ms = (long)memb_size;
+ herr_t retVal = -1;
+
+ retVal = H5Pset_fapl_family((hid_t)plist, (hsize_t)ms, (hid_t)memb_plist);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1fapl_1family */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_fapl_family
+ * Signature: (J[J[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1fapl_1family(JNIEnv *env, jclass clss, jlong tid, jlongArray memb_size, jlongArray memb_plist)
+{
+ herr_t status = -1;
+ jlong *sizeArray;
+ jlong *plistArray;
+ jboolean isCopy;
+ hsize_t *sa;
+ size_t i;
+ size_t rank;
+
+ if (memb_size == NULL) {
+ h5nullArgument(env, "H5Pget_family: memb_size is NULL");
+ } /* end if */
+ else if (memb_plist == NULL) {
+ h5nullArgument(env, "H5Pget_family: memb_plist is NULL");
+ } /* end else if */
+ else {
+ sizeArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR memb_size, &isCopy);
+ if (sizeArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_family: sizeArray not pinned");
+ return -1;
+ } /* end if */
+ rank = (size_t)ENVPTR->GetArrayLength(ENVPAR memb_size);
+ sa = (hsize_t *)HDmalloc(rank * sizeof(hsize_t));
+ if (sa == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR memb_size, sizeArray, JNI_ABORT);
+ h5JNIFatalError(env, "H5Screate-simple: dims not converted to hsize_t");
+ return -1;
+ } /* end if */
+ plistArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR memb_plist, &isCopy);
+ if (plistArray == NULL) {
+ HDfree(sa);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR memb_size, sizeArray, JNI_ABORT);
+ h5JNIFatalError(env, "H5Pget_family: plistArray not pinned");
+ return -1;
+ } /* end if */
+
+ status = H5Pget_fapl_family ((hid_t)tid, sa, (hid_t *)plistArray);
+
+ if (status < 0) {
+ HDfree(sa);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR memb_size, sizeArray, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR memb_plist, plistArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ for (i = 0; i < rank; i++) {
+ sizeArray[i] = (jlong)sa[i];
+ } /* end for */
+ HDfree(sa);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR memb_size, sizeArray, 0);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR memb_plist, plistArray, 0);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1fapl_1family */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_core
+ * Signature: (JJZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1fapl_1core(JNIEnv *env, jclass clss, jlong fapl_id, jlong increment, jboolean backing_store)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_fapl_core((hid_t)fapl_id, (size_t)increment, (hbool_t)backing_store);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1fapl_1core */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_fapl_core
+ * Signature: (J[J[Z)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1fapl_1core(JNIEnv *env, jclass clss, jlong fapl_id, jlongArray increment, jbooleanArray backing_store)
+{
+ herr_t status = -1;
+ jlong *incArray;
+ jboolean *backArray;
+ jboolean isCopy;
+
+ if (increment == NULL) {
+ h5nullArgument(env, "H5Pget_fapl_core: increment is NULL");
+ } /* end if */
+ else if (backing_store == NULL) {
+ h5nullArgument(env, "H5Pget_fapl_core: backing_store is NULL");
+ } /* end else if */
+ else {
+ incArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR increment, &isCopy);
+ if (incArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_fapl_core: incArray not pinned");
+ return -1;
+ } /* end if */
+
+ backArray = (jboolean *)ENVPTR->GetBooleanArrayElements(ENVPAR backing_store, &isCopy);
+ if (backArray == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR increment, incArray, JNI_ABORT);
+ h5JNIFatalError(env, "H5Pget_fapl_core: backArray not pinned");
+ return -1;
+ } /* end if */
+
+ { /* direct cast (size_t *)variable fails on 32-bit environment */
+ long long inc_temp = *(incArray);
+ size_t inc_t = (size_t)inc_temp;
+
+ status = H5Pget_fapl_core((hid_t)fapl_id, &inc_t, (hbool_t *)backArray);
+
+ *incArray = (jlong)inc_t;
+ } /* end direct cast special */
+
+ if (status < 0) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR increment, incArray, JNI_ABORT);
+ ENVPTR->ReleaseBooleanArrayElements(ENVPAR backing_store, backArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR increment, incArray, 0);
+ ENVPTR->ReleaseBooleanArrayElements(ENVPAR backing_store, backArray, 0);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1fapl_1core */
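+
+/*
+ * Hedged usage sketch for the core (in-memory) driver configured above
+ * (wrapper and constant names assumed from the JNI names in this file):
+ *
+ *     long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ *     H5.H5Pset_fapl_core(fapl, 4 * 1024 * 1024, true);  // 4 MiB growth, write back to disk
+ *     long fid = H5.H5Fcreate("mem.h5", HDF5Constants.H5F_ACC_TRUNC,
+ *                             HDF5Constants.H5P_DEFAULT, fapl);
+ */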
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_family_offset
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1family_1offset(JNIEnv *env, jclass clss, jlong fapl_id, jlong offset)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_family_offset ((hid_t)fapl_id, (hsize_t)offset);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1family_1offset */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_family_offset
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1family_1offset(JNIEnv *env, jclass clss, jlong fapl_id)
+{
+ hsize_t offset = 0;
+ herr_t retVal = -1;
+
+ retVal = H5Pget_family_offset ((hid_t)fapl_id, &offset);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)offset;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1family_1offset */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_log
+ * Signature: (JLjava/lang/String;JJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1fapl_1log(JNIEnv *env, jclass clss, jlong fapl_id, jstring logfile, jlong flags, jlong buf_size)
+{
+ herr_t retVal = -1;
+ const char *pLogfile;
+
+ PIN_JAVA_STRING0(logfile, pLogfile);
+
+ retVal = H5Pset_fapl_log( (hid_t)fapl_id, pLogfile, (unsigned long long)flags, (size_t)buf_size );
+
+ UNPIN_JAVA_STRING(logfile, pLogfile);
+
+ if (retVal < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1fapl_1log */
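+
+/*
+ * Illustrative Java-side usage (not part of this file): a sketch of enabling
+ * the logging VFD through H5.H5Pset_fapl_log(long, String, long, long) as
+ * wrapped above; the flag constant is assumed to follow the usual
+ * HDF5Constants.H5FD_LOG_* naming and the 4 KiB buffer size is arbitrary.
+ *
+ *   long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ *   H5.H5Pset_fapl_log(fapl, "h5_io.log", HDF5Constants.H5FD_LOG_LOC_IO, 4096);
+ *   long fid = H5.H5Fcreate("logged.h5", HDF5Constants.H5F_ACC_TRUNC,
+ *                           HDF5Constants.H5P_DEFAULT, fapl);
+ *   H5.H5Fclose(fid);
+ *   H5.H5Pclose(fapl);
+ */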
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Premove_filter
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5P1remove_1filter(JNIEnv *env, jclass clss, jlong obj_id, jint filter)
+{
+ herr_t status = -1;
+
+ status = H5Premove_filter ((hid_t)obj_id, (H5Z_filter_t)filter);
+ if (status < 0)
+ h5libraryError(env);
+
+ return status;
+} /* end Java_hdf_hdf5lib_H5_H5P1remove_1filter */
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset
+ * Signature: (JLjava/lang/String;I)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pset(JNIEnv *env, jclass clss, jlong plid, jstring name, jint val)
+{
+ hid_t retVal = -1;
+ const char *cstr;
+
+ PIN_JAVA_STRING(name, cstr, -1);
+
+ retVal = H5Pset((hid_t)plid, cstr, &val);
+
+ UNPIN_JAVA_STRING(name, cstr);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pexist
+ * Signature: (JLjava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Pexist(JNIEnv *env, jclass clss, jlong plid, jstring name)
+{
+ htri_t bval = JNI_FALSE;
+ const char *cstr;
+
+ PIN_JAVA_STRING(name, cstr, -1);
+
+ bval = H5Pexist((hid_t)plid, cstr);
+
+ UNPIN_JAVA_STRING(name, cstr);
+
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Pexist */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_size
+ * Signature: (JLjava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1size(JNIEnv *env, jclass clss, jlong plid, jstring name)
+{
+ hid_t retVal = -1;
+ const char *cstr;
+ size_t size;
+
+ PIN_JAVA_STRING(name, cstr, -1);
+
+ retVal = H5Pget_size((hid_t)plid, cstr, &size);
+
+ UNPIN_JAVA_STRING(name, cstr);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong) size;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_nprops
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1nprops(JNIEnv *env, jclass clss, jlong plid)
+{
+ size_t nprops;
+
+ if (H5Pget_nprops((hid_t)plid, &nprops) < 0)
+ h5libraryError(env);
+
+ return (jlong)nprops;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1nprops */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_class_name
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1class_1name(JNIEnv *env, jclass clss, jlong plid)
+{
+ char *c_str;
+ jstring j_str = NULL;
+
+ c_str = H5Pget_class_name((hid_t)plid);
+ if (c_str == NULL) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+ j_str = ENVPTR->NewStringUTF(ENVPAR c_str);
+ H5free_memory(c_str);
+
+ if (j_str == NULL)
+ h5JNIFatalError(env,"H5Pget_class_name: return string failed");
+ } /* end else */
+ return j_str;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1class_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_class_parent
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1class_1parent(JNIEnv *env, jclass clss, jlong plid)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Pget_class_parent((hid_t)plid);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1class_1parent */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pisa_class
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pisa_1class(JNIEnv *env, jclass clss, jlong plid, jlong pcls)
+{
+ htri_t retVal = -1;
+
+ retVal = H5Pisa_class((hid_t)plid, (hid_t)pcls);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pisa_1class */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget(JNIEnv *env, jclass clss, jlong plid, jstring name)
+{
+ herr_t retVal = -1;
+ const char *cstr;
+ jint val;
+
+ PIN_JAVA_STRING(name, cstr, -1);
+
+ retVal = H5Pget((hid_t)plid, cstr, &val);
+
+ UNPIN_JAVA_STRING(name, cstr);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)val;
+} /* end Java_hdf_hdf5lib_H5_H5Pget */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pequal
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pequal(JNIEnv *env, jclass clss, jlong plid1, jlong plid2)
+{
+ htri_t retVal = -1;
+
+ retVal = H5Pequal((hid_t)plid1, (hid_t)plid2);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pequal */
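+
+/*
+ * Illustrative Java-side usage (not part of this file): a sketch of the
+ * generic property interface wrapped above (H5.H5Pexist, H5.H5Pget_size,
+ * H5.H5Pget, H5.H5Pset); plist is assumed to be an existing property list
+ * handle and "int_prop" an integer-valued property registered elsewhere.
+ *
+ *   if (H5.H5Pexist(plist, "int_prop")) {
+ *       long nbytes = H5.H5Pget_size(plist, "int_prop");   // raw size of the property value
+ *       int value = H5.H5Pget(plist, "int_prop");          // current value
+ *       H5.H5Pset(plist, "int_prop", value + 1);           // store an updated value
+ *   }
+ */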
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pcopy_prop
+ * Signature: (JJLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pcopy_1prop(JNIEnv *env, jclass clss, jlong dst_plid, jlong src_plid, jstring name)
+{
+ herr_t retVal = -1;
+ const char *cstr;
+
+ PIN_JAVA_STRING(name, cstr, -1);
+
+ retVal = H5Pcopy_prop((hid_t)dst_plid, (hid_t)src_plid, cstr);
+
+ UNPIN_JAVA_STRING(name, cstr);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pcopy_1prop */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Premove
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Premove(JNIEnv *env, jclass clss, jlong plid, jstring name)
+{
+ herr_t retVal = -1;
+ const char *cstr;
+
+ PIN_JAVA_STRING(name, cstr, -1);
+
+ retVal = H5Premove((hid_t)plid, cstr);
+
+ UNPIN_JAVA_STRING(name, cstr);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Premove */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Punregister
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Punregister(JNIEnv *env, jclass clss, jlong plid, jstring name)
+{
+ herr_t retVal = -1;
+ const char *cstr;
+
+ PIN_JAVA_STRING(name, cstr, -1);
+
+ retVal = H5Punregister((hid_t)plid, cstr);
+
+ UNPIN_JAVA_STRING(name, cstr);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Punregister */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Pclose_class
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5__1H5Pclose_1class(JNIEnv *env, jclass clss, jlong plid)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pclose_class((hid_t)plid);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Pclose_1class */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_filter2
+ * Signature: (JI[I[J[IJ[Ljava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1filter2(JNIEnv *env, jclass clss, jlong plist, jint filter_number,
+ jintArray flags, jlongArray cd_nelmts, jintArray cd_values, jlong namelen,
+ jobjectArray name, jintArray filter_config)
+{
+ herr_t status = -1;
+ jint *flagsArray;
+ jlong *cd_nelmtsArray;
+ jint *cd_valuesArray;
+ jint *filter_configArray;
+ jboolean isCopy;
+ char *filter;
+ jstring str;
+
+ if (namelen <= 0) {
+ h5badArgument(env, "H5Pget_filter: namelen <= 0");
+ } /* end if */
+ else if (flags == NULL) {
+ h5badArgument(env, "H5Pget_filter: flags is NULL");
+ } /* end else if */
+ else if (cd_nelmts == NULL) {
+ h5badArgument(env, "H5Pget_filter: cd_nelmts is NULL");
+ } /* end else if */
+ else if (filter_config == NULL) {
+ h5badArgument(env, "H5Pget_filter: filter_config is NULL");
+ } /* end else if */
+ else {
+ filter = (char*)HDmalloc(sizeof(char)*(size_t)namelen);
+ if (filter == NULL) {
+ h5outOfMemory(env, "H5Pget_filter: namelent malloc failed");
+ return -1;
+ } /* end if */
+ flagsArray = (jint*)ENVPTR->GetIntArrayElements(ENVPAR flags, &isCopy);
+ if (flagsArray == NULL) {
+ HDfree(filter);
+ h5JNIFatalError(env, "H5Pget_filter: flags array not pinned");
+ return -1;
+ } /* end if */
+
+ cd_nelmtsArray = (jlong*)ENVPTR->GetLongArrayElements(ENVPAR cd_nelmts, &isCopy);
+ if (cd_nelmtsArray == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ HDfree(filter);
+ h5JNIFatalError(env, "H5Pget_filter: nelmts array not pinned");
+ return -1;
+ } /* end if */
+ filter_configArray = (jint*)ENVPTR->GetIntArrayElements(ENVPAR filter_config, &isCopy);
+ if (filter_configArray == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ HDfree(filter);
+ h5JNIFatalError(env, "H5Pget_filter: filter_config array not pinned");
+ return -1;
+ } /* end if */
+
+ if (*cd_nelmtsArray == 0 && cd_values == NULL) {
+ /* direct cast (size_t *)variable fails on 32-bit environment */
+ long long cd_nelmts_temp = 0;
+ size_t cd_nelmts_t = (size_t)cd_nelmts_temp;
+
+ status = H5Pget_filter2((hid_t)plist, (unsigned)filter_number,
+ (unsigned int *)flagsArray, &cd_nelmts_t, NULL,
+ (size_t)namelen, filter, (unsigned int *)filter_configArray);
+
+ *cd_nelmtsArray = (jlong)cd_nelmts_t;
+ } /* end if */
+ else {
+ if (cd_values == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR filter_config, filter_configArray, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ HDfree(filter);
+ h5badArgument(env, "H5Pget_filter: cd_values is NULL");
+ return -1;
+ } /* end if */
+ cd_valuesArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR cd_values, &isCopy);
+ if (cd_valuesArray == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR filter_config, filter_configArray, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ HDfree(filter);
+ h5JNIFatalError(env, "H5Pget_filter: elmts array not pinned");
+ return -1;
+ } /* end if */
+
+ { /* direct cast (size_t *)variable fails on 32-bit environment */
+ long long cd_nelmts_temp = *(cd_nelmtsArray);
+ size_t cd_nelmts_t = (size_t)cd_nelmts_temp;
+
+ status = H5Pget_filter2((hid_t)plist, (unsigned)filter_number,
+ (unsigned int *)flagsArray, &cd_nelmts_t, (unsigned int *)cd_valuesArray,
+ (size_t)namelen, filter, (unsigned int *)filter_configArray);
+
+ *cd_nelmtsArray = (jlong)cd_nelmts_t;
+ } /* end direct cast special */
+ } /* end else */
+
+ if (status < 0) {
+ if (cd_values)
+ ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR filter_config, filter_configArray, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ HDfree(filter);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ if (cd_values)
+ ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, 0);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR filter_config, filter_configArray, 0);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, 0);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, 0);
+ /* NewStringUTF may throw OutOfMemoryError */
+ str = ENVPTR->NewStringUTF(ENVPAR filter);
+ HDfree(filter);
+ if (str == NULL)
+ h5JNIFatalError(env, "H5Pget_filter: return string not pinned");
+ else
+ ENVPTR->SetObjectArrayElement(ENVPAR name, 0, (jobject)str);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1filter2 */
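+
+/*
+ * Illustrative Java-side usage (not part of this file): a sketch of querying
+ * the first filter of a dataset creation property list through
+ * H5.H5Pget_filter2 with the (JI[I[J[IJ[Ljava/lang/String;[I)I signature
+ * given above; dcpl is assumed to be an existing DCPL handle and the buffer
+ * sizes are arbitrary.
+ *
+ *   int[] flags = new int[1];
+ *   long[] cd_nelmts = { 4 };                 // capacity of cd_values on input
+ *   int[] cd_values = new int[4];
+ *   String[] name = new String[1];
+ *   int[] filter_config = new int[1];
+ *   int filter_id = H5.H5Pget_filter2(dcpl, 0, flags, cd_nelmts, cd_values,
+ *                                     120, name, filter_config);
+ *   System.out.println("filter 0: id=" + filter_id + ", name=" + name[0]);
+ */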
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_filter_by_id2
+ * Signature: (JI[I[J[IJ[Ljava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1filter_1by_1id2(JNIEnv *env, jclass clss, jlong plist, jint filter,
+ jintArray flags, jlongArray cd_nelmts, jintArray cd_values, jlong namelen, jobjectArray name, jintArray filter_config)
+{
+ herr_t status = -1;
+ int i = 0;
+ jint *cd_valuesArray;
+ jint *flagsArray;
+ jint *filter_configArray;
+ jlong *cd_nelmtsArray;
+ jboolean isCopy;
+ long bs;
+ char *aName;
+ jstring str;
+
+ bs = (long)namelen;
+ if (bs <= 0) {
+ h5badArgument(env, "H5Pget_filter_by_id: namelen <= 0");
+ } /* end if */
+ else if (flags == NULL) {
+ h5nullArgument(env, "H5Pget_filter_by_id: flags is NULL");
+ } /* end else if */
+ else if (cd_nelmts == NULL) {
+ h5nullArgument(env, "H5Pget_filter_by_id: cd_nelms is NULL");
+ } /* end else if */
+ else if (cd_values == NULL) {
+ h5nullArgument(env, "H5Pget_filter_by_id: cd_values is NULL");
+ } /* end else if */
+ else if (name == NULL) {
+ h5nullArgument(env, "H5Pget_filter_by_id: name is NULL");
+ } /* end else if */
+ else if (filter_config == NULL) {
+ h5badArgument(env, "H5Pget_filter_by_id: filter_config is NULL");
+ } /* end else if */
+ else {
+ aName = (char*)HDmalloc(sizeof(char) * (size_t)bs);
+ if (aName == NULL) {
+ h5outOfMemory(env, "H5Pget_filter_by_id: malloc failed");
+ return -1;
+ } /* end if */
+ flagsArray = ENVPTR->GetIntArrayElements(ENVPAR flags,&isCopy);
+ if (flagsArray == NULL) {
+ HDfree(aName);
+ h5JNIFatalError(env, "H5Pget_filter_by_id: flags not pinned");
+ return -1;
+ } /* end if */
+ cd_nelmtsArray = ENVPTR->GetLongArrayElements(ENVPAR cd_nelmts, &isCopy);
+ if (cd_nelmtsArray == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ HDfree(aName);
+ h5JNIFatalError(env, "H5Pget_filter_by_id: cd_nelms not pinned");
+ return -1;
+ } /* end if */
+ cd_valuesArray = ENVPTR->GetIntArrayElements(ENVPAR cd_values, &isCopy);
+ if (cd_valuesArray == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+ HDfree(aName);
+ h5JNIFatalError(env, "H5Pget_filter_by_id: cd_values array not converted to unsigned int.");
+ return -1;
+ } /* end if */
+ filter_configArray = ENVPTR->GetIntArrayElements(ENVPAR filter_config, &isCopy);
+ if (filter_configArray == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, JNI_ABORT);
+ HDfree(aName);
+ h5JNIFatalError(env, "H5Pget_filter_by_id: flags not pinned");
+ return -1;
+ } /* end if */
+
+ { /* direct cast (size_t *)variable fails on 32-bit environment */
+ long long cd_nelmts_temp = *(cd_nelmtsArray);
+ size_t cd_nelmts_t = (size_t)cd_nelmts_temp;
+
+ status = H5Pget_filter_by_id2((hid_t)plist, (H5Z_filter_t)filter,
+ (unsigned int *)flagsArray, &cd_nelmts_t, (unsigned int *)cd_valuesArray,
+ (size_t)namelen, (char *)aName, (unsigned int *)filter_configArray);
+
+ *cd_nelmtsArray = (jlong)cd_nelmts_t;
+ } /* end direct cast special handling */
+
+ if (status < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR filter_config, filter_configArray, JNI_ABORT);
+ HDfree(aName);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR aName);
+ HDfree(aName);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR flags, flagsArray, 0);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR cd_nelmts, cd_nelmtsArray, 0);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR cd_values, cd_valuesArray, 0);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR filter_config, filter_configArray, 0);
+ if (str == NULL)
+ h5JNIFatalError(env, "H5Pget_filter_by_id: return string not created");
+ else
+ ENVPTR->SetObjectArrayElement(ENVPAR name, 0, (jobject)str);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1filter_1by_1id2 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_nlinks
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1nlinks(JNIEnv *env, jclass clss, jlong lapl_id)
+{
+ size_t nlinks;
+ if (H5Pget_nlinks((hid_t)lapl_id, &nlinks) < 0)
+ h5libraryError(env);
+
+ return (jlong) nlinks;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1nlinks */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_nlinks
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1nlinks(JNIEnv *env, jclass clss, jlong lapl_id, jlong nlinks)
+{
+ herr_t retVal = -1;
+
+ if (nlinks <= 0) {
+ h5badArgument(env, "H5Pset_1nlinks: nlinks_l <= 0");
+ } /* end if */
+ else {
+ retVal = H5Pset_nlinks((hid_t)lapl_id, (size_t)nlinks);
+ if(retVal < 0)
+ h5libraryError(env);
+ } /* end else */
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1nlinks */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_libver_bounds
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1libver_1bounds(JNIEnv *env, jclass clss, jlong fapl_id, jintArray libver)
+{
+ herr_t retVal = -1;
+ H5F_libver_t *theArray = NULL;
+ jboolean isCopy;
+
+ if (libver == NULL) {
+ h5nullArgument(env, "H5Pget_libver_bounds: libversion bounds is NULL");
+ } /* end if */
+ else {
+ theArray = (H5F_libver_t*)ENVPTR->GetIntArrayElements(ENVPAR libver, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_libver_bounds: input not pinned");
+ } /* end if */
+ else {
+ retVal = H5Pget_libver_bounds((hid_t)fapl_id, &(theArray[0]), &(theArray[1]));
+ if(retVal < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR libver, (jint*)theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseIntArrayElements(ENVPAR libver, (jint*)theArray, 0);
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1libver_1bounds */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_libver_bounds
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1libver_1bounds(JNIEnv *env, jclass clss, jlong fapl_id, jint low, jint high)
+{
+ herr_t retVal = -1;
+
+ if ((H5F_libver_t)high != H5F_LIBVER_LATEST) {
+ h5badArgument(env, "H5Pset_libver_bounds: invalid high library version bound");
+ } /* end if */
+ else if(((H5F_libver_t)low !=H5F_LIBVER_EARLIEST) && ((H5F_libver_t)low != H5F_LIBVER_LATEST)) {
+ h5badArgument(env, "H5Pset_libver_bounds: invalid low library version bound");
+ } /* end else if */
+ else {
+ retVal = H5Pset_libver_bounds((hid_t)fapl_id, (H5F_libver_t)low, (H5F_libver_t)high);
+ if(retVal < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1libver_1bounds */
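+
+/*
+ * Illustrative Java-side usage (not part of this file): a sketch of pinning
+ * the file format bounds through H5.H5Pset_libver_bounds(long, int, int) and
+ * reading them back with H5.H5Pget_libver_bounds(long, int[]); note that this
+ * wrapper only accepts H5F_LIBVER_LATEST as the high bound.
+ *
+ *   long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ *   H5.H5Pset_libver_bounds(fapl, HDF5Constants.H5F_LIBVER_LATEST,
+ *                           HDF5Constants.H5F_LIBVER_LATEST);
+ *   int[] bounds = new int[2];                // [0] = low bound, [1] = high bound
+ *   H5.H5Pget_libver_bounds(fapl, bounds);
+ *   H5.H5Pclose(fapl);
+ */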
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_link_creation_order
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1link_1creation_1order(JNIEnv *env, jclass clss, jlong gcpl_id)
+{
+ unsigned crt_order_flags;
+
+ if(H5Pget_link_creation_order((hid_t)gcpl_id, &crt_order_flags) < 0)
+ h5libraryError(env);
+
+ return (jint)crt_order_flags;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1link_1creation_1order */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_link_creation_order
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1link_1creation_1order(JNIEnv *env, jclass clss, jlong gcpl_id, jint crt_order_flags)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_link_creation_order((hid_t)gcpl_id, (unsigned)crt_order_flags);
+ if(retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1link_1creation_1order */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_attr_creation_order
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1attr_1creation_1order(JNIEnv *env, jclass clss, jlong ocpl_id)
+{
+ unsigned crt_order_flags;
+
+ if(H5Pget_attr_creation_order((hid_t)ocpl_id, &crt_order_flags) < 0)
+ h5libraryError(env);
+
+ return (jint)crt_order_flags;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1attr_1creation_1order */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_attr_creation_order
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1attr_1creation_1order(JNIEnv *env, jclass clss, jlong ocpl_id, jint crt_order_flags)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_attr_creation_order((hid_t)ocpl_id, (unsigned)crt_order_flags);
+ if(retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1attr_1creation_1order */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_copy_object
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1copy_1object(JNIEnv *env, jclass clss, jlong ocp_plist_id, jint copy_options)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_copy_object((hid_t)ocp_plist_id, (unsigned)copy_options);
+ if(retVal < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1copy_1object */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_copy_object
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1copy_1object(JNIEnv *env, jclass clss, jlong ocp_plist_id)
+{
+ unsigned copy_options;
+
+ if(H5Pget_copy_object((hid_t)ocp_plist_id, &copy_options) < 0)
+ h5libraryError(env);
+
+ return (jint)copy_options;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1copy_1object */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_create_intermediate_group
+ * Signature: (JZ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1create_1intermediate_1group(JNIEnv *env, jclass clss, jlong lcpl_id, jboolean crt_intermed_group)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_create_intermediate_group((hid_t)lcpl_id, (unsigned)crt_intermed_group);
+ if(retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1create_1intermediate_1group */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_create_intermediate_group
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1create_1intermediate_1group(JNIEnv *env, jclass clss, jlong lcpl_id)
+{
+ unsigned crt_intermed_group;
+
+ if(H5Pget_create_intermediate_group((hid_t)lcpl_id, &crt_intermed_group) < 0)
+ h5libraryError(env);
+
+ return (jboolean)crt_intermed_group;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1create_1intermediate_1group */
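+
+/*
+ * Illustrative Java-side usage (not part of this file): a sketch of creating
+ * missing intermediate groups in one call by way of the link creation
+ * property list wrappers above; fid is assumed to be an open file handle.
+ *
+ *   long lcpl = H5.H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
+ *   H5.H5Pset_create_intermediate_group(lcpl, true);
+ *   long gid = H5.H5Gcreate(fid, "/a/b/c", lcpl, HDF5Constants.H5P_DEFAULT,
+ *                           HDF5Constants.H5P_DEFAULT);   // "/a" and "/a/b" are created on the fly
+ *   H5.H5Gclose(gid);
+ *   H5.H5Pclose(lcpl);
+ */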
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_data_transform
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1data_1transform(JNIEnv *env, jclass clss, jlong plist_id, jstring expression)
+{
+ herr_t retVal = -1;
+ const char *express;
+
+ PIN_JAVA_STRING(expression, express, -1);
+
+ retVal = H5Pset_data_transform((hid_t)plist_id, express);
+
+ UNPIN_JAVA_STRING(expression, express);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1data_1transform */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_data_transform
+ * Signature: (J[Ljava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1data_1transform(JNIEnv *env, jclass clss, jlong plist_id, jobjectArray expression, jlong size)
+{
+ size_t buf_size;
+ char *express;
+ jlong express_size = -1;
+ jstring str = NULL;
+
+ if (size <= 0) {
+ h5badArgument(env, "H5Pget_data_transform: size <= 0");
+ } /* end if */
+ else {
+ express_size = (jlong)H5Pget_data_transform((hid_t)plist_id, (char*)NULL, (size_t)size);
+ if(express_size < 0) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+ buf_size = (size_t)express_size + 1;/* add extra space for the null terminator */
+ express = (char*)HDmalloc(sizeof(char) * buf_size);
+ if (express == NULL) {
+ h5outOfMemory(env, "H5Pget_data_transform: malloc failed ");
+ } /* end if */
+ else {
+ express_size = (jlong)H5Pget_data_transform((hid_t)plist_id, express, (size_t)size);
+ if (express_size < 0) {
+ HDfree(express);
+ h5libraryError(env);
+ }
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR express);
+ HDfree(express);
+ if (str == NULL)
+ h5JNIFatalError(env, "H5Pget_data_transform: return string not created");
+ else
+ ENVPTR->SetObjectArrayElement(ENVPAR expression, 0, str);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ return express_size;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1data_1transform */
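+
+/*
+ * Illustrative Java-side usage (not part of this file): a sketch of attaching
+ * a data transform expression to a dataset transfer property list and reading
+ * it back through the two wrappers above; the expression and the 64-byte
+ * query size are arbitrary examples.
+ *
+ *   long dxpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ *   H5.H5Pset_data_transform(dxpl, "(2*x)+1");
+ *   String[] expr = new String[1];
+ *   H5.H5Pget_data_transform(dxpl, expr, 64);
+ *   H5.H5Pclose(dxpl);
+ */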
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_elink_acc_flags
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1elink_1acc_1flags(JNIEnv *env, jclass clss, jlong lapl_id)
+{
+ unsigned flags;
+
+ if(H5Pget_elink_acc_flags((hid_t)lapl_id, &flags) < 0)
+ h5libraryError(env);
+
+ return (jint)flags;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1elink_1acc_1flags */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_elink_acc_flags
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1elink_1acc_1flags(JNIEnv *env, jclass clss, jlong lapl_id, jint flags)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_elink_acc_flags((hid_t)lapl_id, (unsigned)flags);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint) retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1elink_1acc_1flags */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_link_phase_change
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1link_1phase_1change(JNIEnv *env, jclass clss, jlong gcpl_id, jint max_compact, jint min_dense)
+{
+ herr_t retVal = -1;
+
+ if(max_compact < min_dense) {
+ h5badArgument(env, "H5Pset_link_phase_change: max compact value must be >= min dense value");
+ } /* end if */
+ else if(max_compact > 65535) {
+ h5badArgument(env, "H5Pset_link_phase_change: max compact value must be < 65536");
+ } /* end else if */
+ else if(min_dense > 65535) {
+ h5badArgument(env, "H5Pset_link_phase_change: min dense value must be < 65536");
+ } /* end else if */
+ else {
+ retVal = H5Pset_link_phase_change((hid_t)gcpl_id, (unsigned)max_compact, (unsigned)min_dense);
+ if(retVal < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1link_1phase_1change */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_link_phase_change
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1link_1phase_1change (JNIEnv *env, jclass clss, jlong gcpl_id, jintArray links)
+{
+ herr_t retVal = -1;
+ unsigned *theArray = NULL;
+ jboolean isCopy;
+
+ if (links == NULL) {
+ h5nullArgument( env, "H5Pget_link_phase_change: links is NULL");
+ } /* end if */
+ else {
+ theArray = (unsigned *)ENVPTR->GetIntArrayElements(ENVPAR links, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError( env, "H5Pget_link_phase_change: input not pinned");
+ } /* end if */
+ else {
+ retVal = H5Pget_link_phase_change((hid_t)gcpl_id, &(theArray[0]), &(theArray[1]));
+ if(retVal < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR links, (jint *)theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseIntArrayElements(ENVPAR links, (jint *)theArray, 0);
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1link_1phase_1change */
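+
+/*
+ * Illustrative Java-side usage (not part of this file): a sketch of tuning
+ * the compact-to-dense link storage transition through the two group creation
+ * property list wrappers above; the 16/8 pair is arbitrary but satisfies the
+ * max_compact >= min_dense check enforced by the setter.
+ *
+ *   long gcpl = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
+ *   H5.H5Pset_link_phase_change(gcpl, 16, 8);   // dense storage above 16 links, compact below 8
+ *   int[] links = new int[2];                   // [0] = max_compact, [1] = min_dense
+ *   H5.H5Pget_link_phase_change(gcpl, links);
+ *   H5.H5Pclose(gcpl);
+ */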
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_attr_phase_change
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1attr_1phase_1change(JNIEnv *env, jclass clss, jlong ocpl_id, jintArray attributes)
+{
+ herr_t retVal = -1;
+ unsigned *theArray = NULL;
+ jboolean isCopy;
+
+ if (attributes == NULL) {
+ h5nullArgument(env, "H5Pget_attr_phase_change: attributes is NULL");
+ } /* end if */
+ else {
+ theArray = (unsigned *)ENVPTR->GetIntArrayElements(ENVPAR attributes, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_attr_phase_change: input not pinned");
+ } /* end if */
+ else {
+ retVal = H5Pget_attr_phase_change((hid_t)ocpl_id, &(theArray[0]), &(theArray[1]));
+ if(retVal < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR attributes, (jint *)theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseIntArrayElements(ENVPAR attributes, (jint *)theArray, 0);
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1attr_1phase_1change */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_attr_phase_change
+ * Signature: (JII)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1attr_1phase_1change(JNIEnv *env, jclass clss, jlong ocpl_id, jint max_compact, jint min_dense)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_attr_phase_change((hid_t)ocpl_id, (unsigned)max_compact, (unsigned)min_dense);
+ if(retVal < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1attr_1phase_1change */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_shared_mesg_phase_change
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1phase_1change(JNIEnv *env, jclass clss, jlong fcpl_id, jintArray size)
+{
+ herr_t retVal = -1;
+ unsigned *theArray = NULL;
+ jboolean isCopy;
+
+ if (size == NULL) {
+ h5nullArgument(env, "H5Pget_shared_mesg_phase_change: size is NULL");
+ } /* end if */
+ else {
+ theArray = (unsigned *)ENVPTR->GetIntArrayElements(ENVPAR size, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_shared_mesg_phase_change: input not pinned");
+ } /* end if */
+ else {
+ retVal = H5Pget_shared_mesg_phase_change((hid_t)fcpl_id, &(theArray[0]), &(theArray[1]));
+ if(retVal < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR size, (jint *)theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseIntArrayElements(ENVPAR size, (jint *)theArray, 0);
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1phase_1change */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_shared_mesg_phase_change
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1phase_1change(JNIEnv *env, jclass clss, jlong fcpl_id, jint max_list, jint min_btree)
+{
+ herr_t retVal = -1;
+
+ /* Check that values are sensible. The min_btree value must be no greater
+ * than the max list plus one.
+ *
+ * Range check to make certain they will fit into encoded form.
+ */
+
+ if(max_list + 1 < min_btree) {
+ h5badArgument(env, "H5Pset_shared_mesg_phase_change: minimum B-tree value is greater than maximum list value");
+ } /* end if */
+ else if(max_list > H5O_SHMESG_MAX_LIST_SIZE) {
+ h5badArgument(env, "H5Pset_shared_mesg_phase_change: max list value is larger than H5O_SHMESG_MAX_LIST_SIZE");
+ } /* end else if */
+ else if(min_btree > H5O_SHMESG_MAX_LIST_SIZE) {
+ h5badArgument(env, "H5Pset_shared_mesg_phase_change: min btree value is larger than H5O_SHMESG_MAX_LIST_SIZE");
+ } /* end else if */
+ else {
+ retVal = H5Pset_shared_mesg_phase_change((hid_t)fcpl_id, (unsigned)max_list, (unsigned)min_btree);
+ if(retVal < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1phase_1change */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_shared_mesg_nindexes
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1nindexes(JNIEnv *env, jclass clss, jlong fcpl_id)
+{
+ unsigned nindexes;
+
+ if(H5Pget_shared_mesg_nindexes((hid_t)fcpl_id, &nindexes) < 0)
+ h5libraryError(env);
+
+ return (jint)nindexes;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1nindexes */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_shared_mesg_nindexes
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1nindexes(JNIEnv *env, jclass clss, jlong plist_id, jint nindexes)
+{
+ herr_t retVal = -1;
+
+ if (nindexes > H5O_SHMESG_MAX_NINDEXES) {
+ h5badArgument(env, "H5Pset_shared_mesg_nindexes: number of indexes is greater than H5O_SHMESG_MAX_NINDEXES");
+ } /* end if */
+ else {
+ retVal = H5Pset_shared_mesg_nindexes((hid_t)plist_id, (unsigned)nindexes);
+ if(retVal < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1nindexes */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_shared_mesg_index
+ * Signature: (JIII)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1index(JNIEnv *env, jclass clss, jlong fcpl_id, jint index_num,
+ jint mesg_type_flags, jint min_mesg_size)
+{
+ herr_t retVal = -1;
+ unsigned nindexes;/* Number of SOHM indexes */
+
+ /* Check arguments */
+ if(mesg_type_flags > H5O_SHMESG_ALL_FLAG) {
+ h5badArgument(env, "H5Pset_shared_mesg_index: unrecognized flags in mesg_type_flags");
+ } /* end if */
+ else if(H5Pget_shared_mesg_nindexes((hid_t)fcpl_id, &nindexes) < 0) { /* Read the current number of indexes */
+ h5libraryError(env);
+ } /* end else if */
+ else {
+ /* Range check */
+ if((unsigned)index_num >= nindexes) {
+ h5badArgument(env, "H5Pset_shared_mesg_index: index_num is too large; no such index");
+ } /* end if */
+ else {
+ retVal = H5Pset_shared_mesg_index((hid_t)fcpl_id, (unsigned)index_num, (unsigned) mesg_type_flags, (unsigned) min_mesg_size);
+ if(retVal < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1index */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_shared_mesg_index
+ * Signature: (JI[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1index(JNIEnv *env, jclass clss, jlong fcpl_id, jint index_num, jintArray mesg_info)
+{
+ herr_t retVal = -1;
+ unsigned nindexes;/* Number of SOHM indexes */
+ unsigned *theArray = NULL;
+ jboolean isCopy;
+
+ /* Read the current number of indexes */
+ if(H5Pget_shared_mesg_nindexes((hid_t)fcpl_id, &nindexes)<0) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+ /* Range check */
+ if((unsigned)index_num >= nindexes) {
+ h5badArgument(env, "H5Pget_shared_mesg_index: index_num is too large; no such index");
+ } /* end if */
+ else if (mesg_info == NULL) {
+ h5nullArgument(env, "H5Pget_shared_mesg_index: mesg_info is NULL");
+ } /* end else if */
+ else {
+ theArray = (unsigned *)ENVPTR->GetIntArrayElements(ENVPAR mesg_info, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_shared_mesg_index: input not pinned");
+ } /* end if */
+ else {
+ retVal = H5Pget_shared_mesg_index((hid_t)fcpl_id, (unsigned)index_num, &(theArray[0]), &(theArray[1]));
+ if(retVal < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR mesg_info, (jint*)theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseIntArrayElements(ENVPAR mesg_info, (jint*)theArray, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1index */
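+
+/*
+ * Illustrative Java-side usage (not part of this file): a sketch of enabling
+ * shared object header messages on a file creation property list through the
+ * wrappers above; the H5O_SHMESG_DTYPE_FLAG constant name is assumed to be
+ * exposed by HDF5Constants and the 40-byte threshold is arbitrary.
+ *
+ *   long fcpl = H5.H5Pcreate(HDF5Constants.H5P_FILE_CREATE);
+ *   H5.H5Pset_shared_mesg_nindexes(fcpl, 1);
+ *   H5.H5Pset_shared_mesg_index(fcpl, 0, HDF5Constants.H5O_SHMESG_DTYPE_FLAG, 40);
+ *   int[] info = new int[2];                  // [0] = mesg_type_flags, [1] = min_mesg_size
+ *   H5.H5Pget_shared_mesg_index(fcpl, 0, info);
+ *   H5.H5Pclose(fcpl);
+ */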
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_local_heap_size_hint
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1local_1heap_1size_1hint(JNIEnv *env, jclass clss, jlong gcpl_id, jlong size_hint)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_local_heap_size_hint((hid_t)gcpl_id, (size_t)size_hint);
+ if(retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1local_1heap_1size_1hint */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_local_heap_size_hint
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1local_1heap_1size_1hint(JNIEnv *env, jclass clss, jlong gcpl_id)
+{
+ size_t size_hint;
+
+ if(H5Pget_local_heap_size_hint((hid_t)gcpl_id, &size_hint) < 0)
+ h5libraryError(env);
+
+ return (jlong)size_hint;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1local_1heap_1size_1hint */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_nbit
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1nbit(JNIEnv *env, jclass clss, jlong plist_id)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_nbit((hid_t)plist_id);
+ if(retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1nbit */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_scaleoffset
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1scaleoffset(JNIEnv *env, jclass clss, jlong plist_id, jint scale_type, jint scale_factor)
+{
+ herr_t retVal = -1;
+
+ /* Check arguments */
+ if(scale_factor < 0) {
+ h5badArgument(env, "H5Pset_scaleoffset: scale factor must be > 0");
+ } /* end if */
+ else {
+ if(scale_type != H5Z_SO_FLOAT_DSCALE && scale_type != H5Z_SO_FLOAT_ESCALE && scale_type != H5Z_SO_INT){
+ h5badArgument(env, "H5Pset_scaleoffset: invalid scale type");
+ } /* end if */
+ else {
+ retVal = H5Pset_scaleoffset((hid_t)plist_id, (H5Z_SO_scale_type_t)scale_type, scale_factor);
+ if(retVal < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1scaleoffset */
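+
+/*
+ * Illustrative Java-side usage (not part of this file): a sketch of adding
+ * the scale-offset filter to a chunked dataset creation property list through
+ * H5.H5Pset_scaleoffset(long, int, int) as wrapped above; the chunk shape is
+ * arbitrary and the scale-type constant is assumed to be exposed as
+ * HDF5Constants.H5Z_SO_INT.
+ *
+ *   long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ *   H5.H5Pset_chunk(dcpl, 2, new long[] { 64, 64 });           // filters require a chunked layout
+ *   H5.H5Pset_scaleoffset(dcpl, HDF5Constants.H5Z_SO_INT, 4);  // integer scale-offset, scale factor 4
+ *   H5.H5Pclose(dcpl);
+ */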
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_est_link_info
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1est_1link_1info(JNIEnv *env, jclass clss, jlong gcpl_id, jint est_num_entries, jint est_name_len)
+{
+ herr_t retVal = -1;
+
+ /* Range check values */
+ if((est_num_entries > 65535) || (est_name_len > 65535)) {
+ h5badArgument(env, "H5Pset_est_link_info: est. name length or number of entries must be < 65536");
+ } /* end if */
+ else {
+ retVal = H5Pset_est_link_info((hid_t)gcpl_id, (unsigned)est_num_entries, (unsigned)est_name_len);
+ if(retVal < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1est_1link_1info */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_est_link_info
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1est_1link_1info(JNIEnv *env, jclass clss, jlong gcpl_id, jintArray link_info)
+{
+ herr_t retVal = -1;
+ unsigned *theArray = NULL;
+ jboolean isCopy;
+
+ if (link_info == NULL) {
+ h5nullArgument(env, "H5Pget_est_link_info: link_info is NULL");
+ } /* end if */
+ else {
+ theArray = (unsigned *)ENVPTR->GetIntArrayElements(ENVPAR link_info,&isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_est_link_info: input not pinned");
+ } /* end if */
+ else {
+ retVal= H5Pget_est_link_info((hid_t)gcpl_id, &(theArray[0]), &(theArray[1]));
+ if(retVal < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR link_info, (jint *)theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseIntArrayElements(ENVPAR link_info, (jint *)theArray, 0);
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1est_1link_1info */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_elink_fapl
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1elink_1fapl(JNIEnv *env, jclass clss, jlong lapl_id, jlong fapl_id)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_elink_fapl((hid_t)lapl_id, (hid_t)fapl_id);
+ if(retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1elink_1fapl */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Pget_elink_fapl
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Pget_1elink_1fapl(JNIEnv *env, jclass clss, jlong lapl_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Pget_elink_fapl((hid_t)lapl_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Pget_1elink_1fapl */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_elink_prefix
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1elink_1prefix(JNIEnv *env, jclass clss, jlong lapl_id, jstring prefix)
+{
+ herr_t retVal = -1;
+ const char *aName;
+
+ PIN_JAVA_STRING(prefix, aName, -1);
+
+ retVal = H5Pset_elink_prefix((hid_t)lapl_id, aName);
+
+ UNPIN_JAVA_STRING(prefix, aName);
+
+ if(retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1elink_1prefix */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_elink_prefix
+ * Signature: (J[Ljava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1elink_1prefix(JNIEnv *env, jclass clss, jlong lapl_id, jobjectArray prefix)
+{
+ size_t size = 0;
+ char *pre;
+ jlong prefix_size = -1;
+ jstring str = NULL;
+
+ if (prefix == NULL) {
+ h5nullArgument(env, "H5Pget_elink_prefix: prefix is NULL");
+ } /* end if */
+ else {
+ prefix_size = (jlong)H5Pget_elink_prefix((hid_t)lapl_id, (char*)NULL, size);
+ if(prefix_size < 0) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+ size = (size_t)prefix_size + 1;/* add extra space for the null terminator */
+ pre = (char*)HDmalloc(sizeof(char)*size);
+ if (pre == NULL) {
+ h5outOfMemory(env, "H5Pget_elink_prefix: malloc failed ");
+ } /* end if */
+ else {
+ prefix_size = (jlong)H5Pget_elink_prefix((hid_t)lapl_id, (char*)pre, size);
+
+ if (prefix_size < 0) {
+ HDfree(pre);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR pre);
+ HDfree(pre);
+ if (str == NULL) {
+ h5JNIFatalError(env, "H5Pget_elink_prefix: return string not created");
+ } /* end if */
+ else
+ ENVPTR->SetObjectArrayElement(ENVPAR prefix, 0, str);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return prefix_size;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1elink_1prefix */
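+
+/*
+ * Illustrative Java-side usage (not part of this file): a sketch of pointing
+ * external-link traversal at a different directory through the link access
+ * property list wrappers above; the prefix path is an arbitrary example.
+ *
+ *   long lapl = H5.H5Pcreate(HDF5Constants.H5P_LINK_ACCESS);
+ *   H5.H5Pset_elink_prefix(lapl, "/data/external/");
+ *   String[] prefix = new String[1];
+ *   long len = H5.H5Pget_elink_prefix(lapl, prefix);   // returns the prefix length
+ *   H5.H5Pclose(lapl);
+ */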
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_direct
+ * Signature: (JJJJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1fapl_1direct(JNIEnv *env, jclass clss, jlong fapl_id, jlong alignment,
+ jlong block_size, jlong cbuf_size)
+{
+ herr_t retVal = -1;
+
+#ifdef H5_HAVE_DIRECT
+ retVal = H5Pset_fapl_direct((hid_t)fapl_id, (size_t)alignment, (size_t)block_size, (size_t)cbuf_size);
+#endif
+ if(retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1fapl_1direct */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_fapl_direct
+ * Signature: (J[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1fapl_1direct(JNIEnv *env, jclass clss, jlong fapl_id, jlongArray info)
+{
+ herr_t retVal = -1;
+
+#ifdef H5_HAVE_DIRECT
+ size_t alignment = 0;
+ size_t block_size = 0;
+ size_t cbuf_size = 0;
+ jlong *theArray;
+ jboolean isCopy;
+ if (info == NULL) {
+ h5nullArgument(env, "H5Pget_fapl_direct: info input array is NULL");
+ } /* end if */
+ else {
+ if (ENVPTR->GetArrayLength(ENVPAR info) < 3) {
+ h5badArgument( env, "H5Pget_fapl_direct: info input array < 4");
+ } /* end if */
+ else {
+ theArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR info, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_fapl_direct: info not pinned");
+ } /* end if */
+ else {
+ retVal = H5Pget_fapl_direct((hid_t)fapl_id, &alignment, &block_size, &cbuf_size);
+ if(retVal < 0) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR info, theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ theArray[0] = (jlong)alignment;
+ theArray[1] = (jlong)block_size;
+ theArray[2] = (jlong)cbuf_size;
+ ENVPTR->ReleaseLongArrayElements(ENVPAR info, theArray, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+#else
+ if (retVal < 0)
+ h5libraryError(env);
+#endif
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1fapl_1direct */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_sec2
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1fapl_1sec2(JNIEnv *env, jclass clss, jlong fapl_id)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_fapl_sec2((hid_t) fapl_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1fapl_1sec2 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_stdio
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1fapl_1stdio(JNIEnv *env, jclass clss, jlong fapl_id)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Pset_fapl_stdio((hid_t) fapl_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1fapl_1stdio */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_windows
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1fapl_1windows(JNIEnv *env, jclass clss, jlong fapl_id)
+{
+ herr_t retVal = -1;
+
+#ifdef H5_HAVE_WINDOWS
+ retVal = H5Pset_fapl_windows((hid_t) fapl_id);
+#endif
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1fapl_1windows */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_fapl_multi
+ * Signature: (J[I[J[Ljava/lang/String;[J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1fapl_1multi(JNIEnv *env, jclass clss, jlong tid, jintArray memb_map,
+ jlongArray memb_fapl, jobjectArray memb_name, jlongArray memb_addr)
+{
+ herr_t status = -1;
+ int i;
+ jint *themapArray = NULL;
+ jlong *thefaplArray = NULL;
+ jlong *theaddrArray = NULL;
+ char **mName = NULL;
+ jstring str;
+ jboolean isCopy;
+ int relax = 0;
+
+ if (memb_map) {
+ themapArray = (jint*)ENVPTR->GetIntArrayElements(ENVPAR memb_map, &isCopy);
+ if (themapArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_fapl_muti: memb_map not pinned");
+ return 0;
+ } /* end if */
+ } /* end if */
+
+ if (memb_fapl) {
+ thefaplArray = (jlong*)ENVPTR->GetLongArrayElements(ENVPAR memb_fapl, &isCopy);
+ if (thefaplArray == NULL) {
+ if (memb_map) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, JNI_ABORT);
+ h5JNIFatalError(env, "H5Pget_fapl_muti: memb_fapl not pinned");
+ return 0;
+ } /* end if */
+ } /* end if */
+
+ if (memb_addr) {
+ theaddrArray = (jlong*)ENVPTR->GetLongArrayElements(ENVPAR memb_addr, &isCopy);
+ if (theaddrArray == NULL) {
+ if (memb_map) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, JNI_ABORT);
+ if (memb_fapl) ENVPTR->ReleaseLongArrayElements(ENVPAR memb_fapl, thefaplArray, JNI_ABORT);
+ h5JNIFatalError(env, "H5Pget_fapl_muti: memb_addr not pinned");
+ return 0;
+ } /* end if */
+ } /* end if */
+
+ if (memb_name)
+ mName = (char**)HDcalloc(H5FD_MEM_NTYPES, sizeof (*mName));
+
+ status = H5Pget_fapl_multi((hid_t)tid, (H5FD_mem_t*)themapArray, (hid_t*)thefaplArray, mName, (haddr_t*)theaddrArray, (hbool_t*)&relax);
+
+ if (status < 0) {
+ if (memb_map) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, JNI_ABORT);
+ if (memb_fapl) ENVPTR->ReleaseLongArrayElements(ENVPAR memb_fapl, thefaplArray, JNI_ABORT);
+ if (memb_addr) ENVPTR->ReleaseLongArrayElements(ENVPAR memb_addr, theaddrArray, JNI_ABORT);
+ if (memb_name) h5str_array_free(mName, H5FD_MEM_NTYPES);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ if (memb_map) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, 0);
+ if (memb_fapl) ENVPTR->ReleaseLongArrayElements(ENVPAR memb_fapl, thefaplArray, 0);
+ if (memb_addr) ENVPTR->ReleaseLongArrayElements(ENVPAR memb_addr, theaddrArray, 0);
+
+ if (memb_name) {
+ if (mName) {
+ for (i = 0; i < H5FD_MEM_NTYPES; i++) {
+ if (*(mName + i)) {
+ str = ENVPTR->NewStringUTF(ENVPAR *(mName+i));
+ ENVPTR->SetObjectArrayElement(ENVPAR memb_name, i, (jobject)str);
+ } /* end if */
+ } /* end for */
+ } /* end if */
+ h5str_array_free(mName, H5FD_MEM_NTYPES);
+ } /* end if */
+ } /* end else */
+
+ return (relax!=0);
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1fapl_1multi */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_multi
+ * Signature: (J[I[J[Ljava/lang/String;[JZ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1fapl_1multi(JNIEnv *env, jclass clss, jlong tid, jintArray memb_map,
+ jlongArray memb_fapl, jobjectArray memb_name, jlongArray memb_addr, jboolean relax)
+{
+ herr_t status = -1;
+ jint *themapArray = NULL;
+ jlong *thefaplArray = NULL;
+ jlong *theaddrArray = NULL;
+ jboolean isCopy;
+ jclass Sjc;
+ jstring rstring;
+ jobject o;
+ jboolean bb;
+ const char **mName = NULL;
+ char *member_name[H5FD_MEM_NTYPES];
+
+ if (memb_map) {
+ themapArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR memb_map, &isCopy);
+ if (themapArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_fapl_muti: memb_map not pinned");
+ return;
+ } /* end if */
+ } /* end if */
+
+ if (memb_fapl) {
+ thefaplArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR memb_fapl, &isCopy);
+ if (thefaplArray == NULL) {
+ if (memb_map)
+ ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, JNI_ABORT);
+ h5JNIFatalError(env, "H5Pget_fapl_muti: memb_fapl not pinned");
+ return;
+ } /* end if */
+ } /* end if */
+
+ if (memb_addr) {
+ theaddrArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR memb_addr, &isCopy);
+ if (theaddrArray == NULL) {
+ if (memb_map)
+ ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, JNI_ABORT);
+ if (memb_fapl)
+ ENVPTR->ReleaseLongArrayElements(ENVPAR memb_fapl, thefaplArray, JNI_ABORT);
+ h5JNIFatalError(env, "H5Pget_fapl_muti: memb_addr not pinned");
+ return;
+ } /* end if */
+ } /* end if */
+
+ HDmemset(member_name, 0, H5FD_MEM_NTYPES * sizeof(char*));
+ if (memb_name) {
+ int i;
+ for (i = 0; i < H5FD_MEM_NTYPES; i++) {
+ jstring obj = (jstring) ENVPTR->GetObjectArrayElement(ENVPAR (jobjectArray) memb_name, i);
+ if (obj != 0) {
+ jsize length = ENVPTR->GetStringUTFLength(ENVPAR obj);
+ const char *utf8 = ENVPTR->GetStringUTFChars(ENVPAR obj, 0);
+
+ if (utf8) {
+ member_name[i] = (char*)HDmalloc(strlen(utf8) + 1);
+ if (member_name[i]) {
+ strcpy(member_name[i], utf8);
+ } /* end if */
+ } /* end if */
+
+ ENVPTR->ReleaseStringUTFChars(ENVPAR obj, utf8);
+ ENVPTR->DeleteLocalRef(ENVPAR obj);
+ } /* end if */
+ } /* end for */
+ mName = (const char **)member_name;
+ } /* end if */
+
+ status = H5Pset_fapl_multi((hid_t)tid, (const H5FD_mem_t *)themapArray, (const hid_t *)thefaplArray, mName, (const haddr_t *)theaddrArray, (hbool_t)relax);
+
+ if (status < 0) {
+ if (memb_map) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, JNI_ABORT);
+ if (memb_fapl) ENVPTR->ReleaseLongArrayElements(ENVPAR memb_fapl, thefaplArray, JNI_ABORT);
+ if (memb_addr) ENVPTR->ReleaseLongArrayElements(ENVPAR memb_addr, theaddrArray, JNI_ABORT);
+ if (memb_name) {
+ int i;
+ for (i = 0; i < H5FD_MEM_NTYPES; i++)
+ HDfree(member_name[i]);
+ } /* end if */
+ h5libraryError(env);
+ } /* end if */
+ else {
+ if (memb_map) ENVPTR->ReleaseIntArrayElements(ENVPAR memb_map, themapArray, 0);
+ if (memb_fapl) ENVPTR->ReleaseLongArrayElements(ENVPAR memb_fapl, thefaplArray, 0);
+ if (memb_addr) ENVPTR->ReleaseLongArrayElements(ENVPAR memb_addr, theaddrArray, 0);
+ if (memb_name) {
+ if (mName != NULL) {
+ int i;
+ Sjc = ENVPTR->FindClass(ENVPAR "java/lang/String");
+ if (Sjc != NULL) {
+ for (i = 0; i < H5FD_MEM_NTYPES; i++) {
+ rstring = ENVPTR->NewStringUTF(ENVPAR member_name[i]);
+ o = ENVPTR->GetObjectArrayElement(ENVPAR memb_name, i);
+ if (o != NULL) {
+ bb = ENVPTR->IsInstanceOf(ENVPAR o, Sjc);
+ if (bb == JNI_TRUE) {
+ ENVPTR->SetObjectArrayElement(ENVPAR memb_name, i, (jobject)rstring);
+ } /* end if */
+ ENVPTR->DeleteLocalRef(ENVPAR o);
+ } /* end if */
+ HDfree(member_name[i]);
+ } /* end for */
+ } /* end if */
+ } /* end if */
+ } /* end if */
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1fapl_1multi */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_split
+ * Signature: (JLjava/lang/String;JLjava/lang/String;J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1fapl_1split(JNIEnv *env, jclass clss, jlong fapl_id, jstring metaext, jlong meta_pl_id, jstring rawext, jlong raw_pl_id)
+{
+ herr_t retVal = -1;
+ const char *mstr;
+ const char *rstr;
+
+ PIN_JAVA_STRING_TWO0(metaext, mstr, rawext, rstr);
+
+ retVal = H5Pset_fapl_split((hid_t)fapl_id, mstr, (hid_t)meta_pl_id, rstr, (hid_t)raw_pl_id);
+
+ UNPIN_JAVA_STRING_TWO(metaext, mstr, rawext, rstr);
+
+ if (retVal < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1fapl_1split */
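+
+/*
+ * Illustrative Java-side usage (not part of this file): a sketch of the split
+ * VFD wrapper above, which stores metadata and raw data in separate files
+ * named by the given extensions; "-m.h5"/"-r.h5" is the conventional pair.
+ *
+ *   long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ *   H5.H5Pset_fapl_split(fapl, "-m.h5", HDF5Constants.H5P_DEFAULT,
+ *                        "-r.h5", HDF5Constants.H5P_DEFAULT);
+ *   long fid = H5.H5Fcreate("split_example", HDF5Constants.H5F_ACC_TRUNC,
+ *                           HDF5Constants.H5P_DEFAULT, fapl);
+ *   H5.H5Fclose(fid);   // produces split_example-m.h5 and split_example-r.h5
+ *   H5.H5Pclose(fapl);
+ */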
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_meta_block_size
+ * Signature: (JJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1meta_1block_1size(JNIEnv *env, jclass clss, jlong plist, jlong size)
+{
+ long sz = (long)size;
+
+ if (H5Pset_meta_block_size((hid_t)plist, (hsize_t)sz) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1meta_1block_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_meta_block_size
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1meta_1block_1size(JNIEnv *env, jclass clss, jlong plist)
+{
+ hsize_t s;
+
+ if (H5Pget_meta_block_size((hid_t)plist, &s) < 0)
+ h5libraryError(env);
+
+ return (jlong)s;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1meta_1block_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_sieve_buf_size
+ * Signature: (JJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1sieve_1buf_1size(JNIEnv *env, jclass clss, jlong plist, jlong size)
+{
+ size_t sz = (size_t)size;
+
+ if (H5Pset_sieve_buf_size((hid_t)plist, (size_t)sz) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1sieve_1buf_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_sieve_buf_size
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1sieve_1buf_1size(JNIEnv *env, jclass clss, jlong plist)
+{
+ size_t s;
+
+ if ( H5Pget_sieve_buf_size((hid_t)plist, &s) < 0)
+ h5libraryError(env);
+
+ return (jlong)s;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1sieve_1buf_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_elink_file_cache_size
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1elink_1file_1cache_1size(JNIEnv *env, jclass clss, jlong plist, jint size)
+{
+ unsigned sz = (unsigned)size;
+
+ if (H5Pset_elink_file_cache_size((hid_t)plist, (unsigned)sz) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1elink_1file_1cache_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_elink_file_cache_size
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1elink_1file_1cache_1size(JNIEnv *env, jclass clss, jlong plist)
+{
+ unsigned s;
+
+ if (H5Pget_elink_file_cache_size((hid_t)plist, &s) < 0)
+ h5libraryError(env);
+
+ return (jint)s;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1elink_1file_1cache_1size */
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_mdc_config
+ * Signature: (J)Lhdf/hdf5lib/structs/H5AC_cache_config_t;
+ */
+JNIEXPORT jobject JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1mdc_1config(JNIEnv *env, jclass clss, jlong plist)
+{
+ H5AC_cache_config_t cacheinfo;
+ herr_t status = -1;
+ jvalue args[30];
+ jstring j_str = NULL;
+ jobject ret_obj = NULL;
+
+ HDmemset(&cacheinfo, 0, sizeof(H5AC_cache_config_t));
+ cacheinfo.version = H5AC__CURR_CACHE_CONFIG_VERSION;
+
+ status = H5Pget_mdc_config((hid_t)plist, &cacheinfo);
+
+ if (status < 0) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+ args[0].i = cacheinfo.version;
+ args[1].z = cacheinfo.rpt_fcn_enabled;
+ args[2].z = cacheinfo.open_trace_file;
+ args[3].z = cacheinfo.close_trace_file;
+ if (cacheinfo.trace_file_name != NULL) {
+ j_str = ENVPTR->NewStringUTF(ENVPAR cacheinfo.trace_file_name);
+ } /* end if */
+ args[4].l = j_str;
+ args[5].z = cacheinfo.evictions_enabled;
+ args[6].z = cacheinfo.set_initial_size;
+ args[7].j = (jlong)cacheinfo.initial_size;
+ args[8].d = cacheinfo.min_clean_fraction;
+ args[9].j = (jlong)cacheinfo.max_size;
+ args[10].j = (jlong)cacheinfo.min_size;
+ args[11].j = cacheinfo.epoch_length;
+ args[12].i = cacheinfo.incr_mode;
+ args[13].d = cacheinfo.lower_hr_threshold;
+ args[14].d = cacheinfo.increment;
+ args[15].z = cacheinfo.apply_max_increment;
+ args[16].j = (jlong)cacheinfo.max_increment;
+ args[17].i = cacheinfo.flash_incr_mode;
+ args[18].d = cacheinfo.flash_multiple;
+ args[19].d = cacheinfo.flash_threshold;
+ args[20].i = cacheinfo.decr_mode;
+ args[21].d = cacheinfo.upper_hr_threshold;
+ args[22].d = cacheinfo.decrement;
+ args[23].z = cacheinfo.apply_max_decrement;
+ args[24].j = (jlong)cacheinfo.max_decrement;
+ args[25].i = cacheinfo.epochs_before_eviction;
+ args[26].z = cacheinfo.apply_empty_reserve;
+ args[27].d = cacheinfo.empty_reserve;
+ args[28].j = (jlong)cacheinfo.dirty_bytes_threshold;
+ args[29].i = cacheinfo.metadata_write_strategy;
+ CALL_CONSTRUCTOR("hdf/hdf5lib/structs/H5AC_cache_config_t", "(IZZZLjava/lang/String;ZZJDJJJIDDZJIDDIDDZJIZDJI)V", args);
+ } /* end else */
+ return ret_obj;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1mdc_1config */
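+
+/*
+ * Illustrative Java-side usage (not part of this file): a sketch of the
+ * read-modify-write pattern supported by this getter and the setter that
+ * follows; fapl is assumed to be an existing file access property list, the
+ * struct fields are assumed to be directly assignable, and the 8 MiB initial
+ * size is an arbitrary choice.
+ *
+ *   H5AC_cache_config_t config = H5.H5Pget_mdc_config(fapl);
+ *   config.set_initial_size = true;
+ *   config.initial_size = 8 * 1024 * 1024;
+ *   H5.H5Pset_mdc_config(fapl, config);
+ */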
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_mdc_config
+ * Signature: (JLhdf/hdf5lib/structs/H5AC_cache_config_t;)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1mdc_1config(JNIEnv *env, jclass clss, jlong plist, jobject cache_config)
+{
+ herr_t status = -1;
+ jclass cls;
+ jfieldID fid;
+ jstring j_str;
+ const char *str;
+ H5AC_cache_config_t cacheinfo;
+
+ cls = ENVPTR->GetObjectClass(ENVPAR cache_config);
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "version", "I");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: version");
+ return;
+ } /* end if */
+ cacheinfo.version = ENVPTR->GetIntField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading version failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "rpt_fcn_enabled", "Z");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: rpt_fcn_enabled");
+ return;
+ } /* end if */
+ cacheinfo.rpt_fcn_enabled = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading rpt_fcn_enabled failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "open_trace_file", "Z");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: open_trace_file");
+ return;
+ } /* end if */
+ cacheinfo.open_trace_file = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading open_trace_file failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "close_trace_file", "Z");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: close_trace_file");
+ return;
+ } /* end if */
+ cacheinfo.close_trace_file = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading close_trace_file failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "trace_file_name", "Ljava/lang/String;");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: trace_file_name");
+ return;
+ } /* end if */
+ j_str = (jstring)ENVPTR->GetObjectField(ENVPAR cache_config, fid);
+ if (j_str == NULL) {
+ h5badArgument(env, "H5Pset_mdc_config: trace_file_name is NULL");
+ return;
+ } /* end if */
+ str = ENVPTR->GetStringUTFChars(ENVPAR j_str, NULL);
+ if (str == NULL) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: out of memory trace_file_name");
+ return;
+ } /* end if */
+ strncpy(cacheinfo.trace_file_name, str, sizeof(cacheinfo.trace_file_name) - 1);
+ cacheinfo.trace_file_name[sizeof(cacheinfo.trace_file_name) - 1] = '\0';
+ ENVPTR->ReleaseStringUTFChars(ENVPAR j_str, str);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading trace_file_name failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "evictions_enabled", "Z");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: evictions_enabled");
+ return;
+ } /* end if */
+ cacheinfo.evictions_enabled = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading evictions_enabled failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "set_initial_size", "Z");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: set_initial_size");
+ return;
+ } /* end if */
+ cacheinfo.set_initial_size = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading set_initial_size failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "initial_size", "J");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: initial_size");
+ return;
+ } /* end if */
+ cacheinfo.initial_size = (size_t)ENVPTR->GetLongField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading initial_size failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "min_clean_fraction", "D");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: min_clean_fraction");
+ return;
+ } /* end if */
+ cacheinfo.min_clean_fraction = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading min_clean_fraction failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "max_size", "J");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: max_size");
+ return;
+ } /* end if */
+ cacheinfo.max_size = (size_t)ENVPTR->GetLongField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading max_size failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "min_size", "J");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: min_size");
+ return;
+ } /* end if */
+ cacheinfo.min_size = (size_t)ENVPTR->GetLongField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading min_size failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "epoch_length", "J");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: epoch_length");
+ return;
+ }
+ cacheinfo.epoch_length = (long int)ENVPTR->GetLongField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading epoch_length failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "incr_mode", "I");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: incr_mode");
+ return;
+ } /* end if */
+ cacheinfo.incr_mode = ENVPTR->GetIntField(ENVPAR cache_config, fid); /*(enum H5C_cache_incr_mode) */
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading incr_mode failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "lower_hr_threshold", "D");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: lower_hr_threshold");
+ return;
+ } /* end if */
+ cacheinfo.lower_hr_threshold = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading lower_hr_threshold failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "increment", "D");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: increment");
+ return;
+ } /* end if */
+ cacheinfo.increment = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading increment failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "apply_max_increment", "Z");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: apply_max_increment");
+ return;
+ } /* end if */
+ cacheinfo.apply_max_increment = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading apply_max_increment failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "max_increment", "J");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: max_increment");
+ return;
+ } /* end if */
+ cacheinfo.max_increment = (size_t)ENVPTR->GetLongField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading max_increment failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "flash_incr_mode", "I");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: flash_incr_mode");
+ return;
+ } /* end if */
+ cacheinfo.flash_incr_mode = ENVPTR->GetIntField(ENVPAR cache_config, fid); /*(enum H5C_cache_flash_incr_mode) */
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading flash_incr_mode failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "flash_multiple", "D");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: flash_multiple");
+ return;
+ } /* end if */
+ cacheinfo.flash_multiple = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading flash_multiple failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "flash_threshold", "D");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: flash_threshold");
+ return;
+ } /* end if */
+ cacheinfo.flash_threshold = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading flash_threshold failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "decr_mode", "I");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: decr_mode");
+ return;
+ } /* end if */
+ cacheinfo.decr_mode = ENVPTR->GetIntField(ENVPAR cache_config, fid); /*(enum H5C_cache_decr_mode) */
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading decr_mode failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "upper_hr_threshold", "D");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: upper_hr_threshold");
+ return;
+ } /* end if */
+ cacheinfo.upper_hr_threshold = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading upper_hr_threshold failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "decrement", "D");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: decrement");
+ return;
+ } /* end if */
+ cacheinfo.decrement = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading decrement failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "apply_max_decrement", "Z");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: apply_max_decrement");
+ return;
+ } /* end if */
+ cacheinfo.apply_max_decrement = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading apply_max_decrement failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "max_decrement", "J");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: max_decrement");
+ return;
+ } /* end if */
+ cacheinfo.max_decrement = (size_t)ENVPTR->GetLongField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading max_decrement failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "epochs_before_eviction", "I");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: epochs_before_eviction");
+ return;
+ } /* end if */
+ cacheinfo.epochs_before_eviction = ENVPTR->GetIntField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading epochs_before_eviction failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "apply_empty_reserve", "Z");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: apply_empty_reserve");
+ return;
+ } /* end if */
+ cacheinfo.apply_empty_reserve = ENVPTR->GetBooleanField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading apply_empty_reserve failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "empty_reserve", "D");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: empty_reserve");
+ return;
+ } /* end if */
+ cacheinfo.empty_reserve = ENVPTR->GetDoubleField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading empty_reserve failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "dirty_bytes_threshold", "J");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: dirty_bytes_threshold");
+ return;
+ } /* end if */
+ cacheinfo.dirty_bytes_threshold = (size_t)ENVPTR->GetLongField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading dirty_bytes_threshold failed");
+ return;
+ } /* end if */
+
+ fid = ENVPTR->GetFieldID(ENVPAR cls, "metadata_write_strategy", "I");
+ if(fid == 0) {
+ h5badArgument(env, "H5Pset_mdc_config: metadata_write_strategy");
+ return;
+ } /* end if */
+ cacheinfo.metadata_write_strategy = ENVPTR->GetIntField(ENVPAR cache_config, fid);
+ if(ENVPTR->ExceptionOccurred(ENVONLY)) {
+ h5JNIFatalError(env, "H5Pset_mdc_config: loading metadata_write_strategy failed");
+ return;
+ } /* end if */
+
+ status = H5Pset_mdc_config((hid_t)plist, &cacheinfo);
+
+ if (status < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1mdc_1config */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_chunk_cache
+ * Signature: (JJJD)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1chunk_1cache(JNIEnv *env, jclass clss, jlong dapl, jlong rdcc_nslots,
+ jlong rdcc_nbytes, jdouble rdcc_w0)
+{
+ if (H5Pset_chunk_cache((hid_t)dapl, (size_t)rdcc_nslots, (size_t)rdcc_nbytes, (double) rdcc_w0) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1chunk_1cache */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_chunk_cache
+ * Signature: (J[J[J[D)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1chunk_1cache(JNIEnv *env, jclass clss, jlong dapl, jlongArray rdcc_nslots,
+ jlongArray rdcc_nbytes, jdoubleArray rdcc_w0)
+{
+ herr_t status = -1;
+ jint mode;
+ jdouble *w0Array;
+ jlong *rdcc_nslotsArray;
+ jlong *nbytesArray;
+ jboolean isCopy;
+
+ if (rdcc_w0 == NULL) {
+ w0Array = (jdouble *)NULL;
+ } /* end if */
+ else {
+ w0Array = (jdouble *)ENVPTR->GetDoubleArrayElements(ENVPAR rdcc_w0, &isCopy);
+ if (w0Array == NULL) {
+ h5JNIFatalError(env, "H5Pget_chunk_cache: w0_array array not pinned");
+ return;
+ } /* end if */
+ } /* end else */
+
+ if (rdcc_nslots == NULL) {
+ rdcc_nslotsArray = (jlong *)NULL;
+ } /* end if */
+ else {
+ rdcc_nslotsArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR rdcc_nslots, &isCopy);
+ if (rdcc_nslotsArray == NULL) {
+ /* exception -- out of memory */
+ if (w0Array != NULL) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR rdcc_w0, w0Array, JNI_ABORT);
+ } /* end if */
+ h5JNIFatalError(env, "H5Pget_chunk_cache: rdcc_nslots array not pinned");
+ return;
+ } /* end if */
+ } /* end else */
+
+ if (rdcc_nbytes == NULL) {
+ nbytesArray = (jlong *)NULL;
+ } /* end if */
+ else {
+ nbytesArray = (jlong *)ENVPTR->GetLongArrayElements(ENVPAR rdcc_nbytes, &isCopy);
+ if (nbytesArray == NULL) {
+ if (w0Array != NULL) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR rdcc_w0, w0Array, JNI_ABORT);
+ } /* end if */
+ if (rdcc_nslotsArray != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR rdcc_nslots, rdcc_nslotsArray, JNI_ABORT);
+ } /* end if */
+ h5JNIFatalError(env, "H5Pget_chunk_cache: nbytesArray array not pinned");
+ return;
+ } /* end if */
+ } /* end else */
+
+ { /* direct cast (size_t *)variable fails on 32-bit environment */
+ size_t rdcc_nslots_t = 0;
+ size_t nbytes_t = 0;
+
+ /* pass NULL through for any output the caller did not request */
+ status = H5Pget_chunk_cache((hid_t)dapl,
+ (rdcc_nslotsArray == NULL) ? NULL : &rdcc_nslots_t,
+ (nbytesArray == NULL) ? NULL : &nbytes_t,
+ (double *)w0Array);
+
+ if (rdcc_nslotsArray != NULL)
+ *rdcc_nslotsArray = (jlong)rdcc_nslots_t;
+ if (nbytesArray != NULL)
+ *nbytesArray = (jlong)nbytes_t;
+ } /* end direct cast special handling */
+
+ if (status < 0) {
+ mode = JNI_ABORT;
+ } /* end if */
+ else {
+ mode = 0; /* commit and free */
+ } /* end else */
+
+ if (rdcc_nslotsArray != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR rdcc_nslots, rdcc_nslotsArray, mode);
+ } /* end if */
+ if (nbytesArray != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR rdcc_nbytes, nbytesArray, mode);
+ } /* end if */
+
+ if (w0Array != NULL) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR rdcc_w0, w0Array, mode);
+ } /* end if */
+
+ if (status < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1chunk_1cache */
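+
+/*
+ * Illustrative Java-side usage of the chunk-cache wrappers above. A minimal
+ * sketch; it assumes the matching hdf.hdf5lib.H5 wrappers, with the get
+ * variant returning its values through one-element arrays as in the JNI
+ * signature (J[J[J[D)V:
+ *
+ *     long[] nslots = new long[1];
+ *     long[] nbytes = new long[1];
+ *     double[] w0 = new double[1];
+ *     H5.H5Pset_chunk_cache(dapl, 521, 16 * 1024 * 1024, 0.75);
+ *     H5.H5Pget_chunk_cache(dapl, nslots, nbytes, w0);
+ */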
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_obj_track_times
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1obj_1track_1times(JNIEnv *env, jclass clss, jlong objplid)
+{
+ hbool_t track_times;
+
+ if (H5Pget_obj_track_times((hid_t)objplid, &track_times) < 0) {
+ h5libraryError(env);
+ return JNI_FALSE;
+ } /* end if */
+ if (track_times == 1) {
+ return JNI_TRUE;
+ } /* end if */
+ return JNI_FALSE;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1obj_1track_1times */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_obj_track_times
+ * Signature: (JZ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1obj_1track_1times(JNIEnv *env, jclass clss, jlong objplid, jboolean track_times)
+{
+ hbool_t track;
+
+ if (track_times == JNI_TRUE) {
+ track = 1;
+ } /* end if */
+ else {
+ track = 0;
+ } /* end else */
+
+ if (H5Pset_obj_track_times((hid_t)objplid, track) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1obj_1track_1times */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_char_encoding
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1char_1encoding(JNIEnv *env, jclass clss, jlong acpl)
+{
+ H5T_cset_t encoding;
+
+ if (H5Pget_char_encoding((hid_t)acpl, &encoding) < 0)
+ h5libraryError(env);
+
+ return encoding;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1char_1encoding */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_char_encoding
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1char_1encoding(JNIEnv *env, jclass clss, jlong acpl, jint encoding)
+{
+ if (H5Pset_char_encoding((hid_t)acpl, (H5T_cset_t)encoding) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1char_1encoding */
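+
+/*
+ * Illustrative Java-side usage of the character-encoding wrappers above.
+ * A minimal sketch; it assumes the matching hdf.hdf5lib.H5 wrappers and
+ * the HDF5Constants character-set values:
+ *
+ *     H5.H5Pset_char_encoding(acpl, HDF5Constants.H5T_CSET_UTF8);
+ *     int cset = H5.H5Pget_char_encoding(acpl);
+ */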
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_virtual
+ * Signature: (JJLjava/lang/String;Ljava/lang/String;J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1virtual(JNIEnv *env, jclass clss, jlong dcpl_id, jlong vspace_id,
+ jstring src_file_name, jstring src_dset_name, jlong src_space_id)
+{
+ herr_t retVal = -1;
+ const char *fstr;
+ const char *dstr;
+
+ PIN_JAVA_STRING_TWO0(src_file_name, fstr, src_dset_name, dstr);
+
+ retVal = H5Pset_virtual((hid_t)dcpl_id, (hid_t)vspace_id, fstr, dstr, (hid_t)src_space_id);
+
+ UNPIN_JAVA_STRING_TWO(src_file_name, fstr, src_dset_name, dstr);
+
+ if (retVal < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1virtual */
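+
+/*
+ * Illustrative Java-side construction of one virtual-dataset mapping.
+ * A minimal sketch; it assumes the matching hdf.hdf5lib.H5 wrapper and
+ * previously created virtual (vspace_id) and source (src_space_id)
+ * dataspaces:
+ *
+ *     long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ *     H5.H5Pset_virtual(dcpl, vspace_id, "f-%b.h5", "/A", src_space_id);
+ */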
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_virtual_count
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1virtual_1count(JNIEnv *env, jclass clss, jlong dcpl_id)
+{
+ size_t s;
+
+ if (H5Pget_virtual_count((hid_t)dcpl_id, &s) < 0)
+ h5libraryError(env);
+
+ return (jlong)s;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1virtual_1count */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_virtual_vspace
+ * Signature: (JJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1virtual_1vspace(JNIEnv *env, jclass clss, jlong dcpl_id, jlong index)
+{
+ hid_t space_id = -1;
+
+ space_id = H5Pget_virtual_vspace((hid_t)dcpl_id, (size_t)index);
+ if (space_id < 0)
+ h5libraryError(env);
+
+ return (jlong)space_id;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1virtual_1vspace */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_virtual_srcspace
+ * Signature: (JJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1virtual_1srcspace(JNIEnv *env, jclass clss, jlong dcpl_id, jlong index)
+{
+ hid_t space_id = -1;
+
+ space_id = H5Pget_virtual_srcspace((hid_t)dcpl_id, (size_t)index);
+ if (space_id < 0)
+ h5libraryError(env);
+
+ return (jlong)space_id;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1virtual_1srcspace */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_virtual_filename
+ * Signature: (JJ)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1virtual_1filename(JNIEnv *env, jclass clss, jlong dcpl_id, jlong index)
+{
+ char *fname;
+ ssize_t buf_size;
+ ssize_t status;
+ jstring str = NULL;
+
+ /* get the length of the filename */
+ buf_size = H5Pget_virtual_filename((hid_t)dcpl_id, (size_t)index, NULL, 0);
+ if (buf_size < 0) {
+ h5badArgument( env, "H5Pget_virtual_filename: buf_size < 0");
+ } /* end if */
+ else {
+ buf_size++; /* add extra space for the null terminator */
+ fname = (char *)HDmalloc(sizeof(char) * (size_t)buf_size);
+ if (fname == NULL) {
+ h5outOfMemory( env, "H5Pget_virtual_filename: malloc failed");
+ } /* end if */
+ else {
+ status = H5Pget_virtual_filename((hid_t)dcpl_id, (size_t)index, fname, (size_t)buf_size);
+
+ if (status >= 0) {
+ str = ENVPTR->NewStringUTF(ENVPAR fname);
+ HDfree(fname);
+ if (str == NULL)
+ h5JNIFatalError( env, "H5Pget_virtual_filename: return string not allocated");
+ } /* end if */
+ else {
+ HDfree(fname);
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jstring)str;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1virtual_1filename */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_virtual_dsetname
+ * Signature: (JJ)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1virtual_1dsetname(JNIEnv *env, jclass clss, jlong dcpl_id, jlong index)
+{
+ char *dname;
+ ssize_t buf_size;
+ ssize_t status;
+ jstring str = NULL;
+
+ /* get the length of the source dataset name */
+ buf_size = H5Pget_virtual_dsetname((hid_t)dcpl_id, (size_t)index, NULL, 0);
+ if (buf_size < 0) {
+ h5badArgument( env, "H5Pget_virtual_dsetname: buf_size < 0");
+ } /* end if */
+ else {
+ buf_size++; /* add extra space for the null terminator */
+ dname = (char *)HDmalloc(sizeof(char) * (size_t)buf_size);
+ if (dname == NULL) {
+ h5outOfMemory( env, "H5Pget_virtual_dsetname: malloc failed");
+ } /* end if */
+ else {
+ status = H5Pget_virtual_dsetname((hid_t)dcpl_id, (size_t)index, dname, (size_t)buf_size);
+
+ if (status >= 0) {
+ str = ENVPTR->NewStringUTF(ENVPAR dname);
+ HDfree(dname);
+ if (str == NULL)
+ h5JNIFatalError( env, "H5Pget_virtual_dsetname: return string not allocated");
+ } /* end if */
+ else {
+ HDfree(dname);
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ return (jstring)str;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1virtual_1dsetname */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_virtual_view
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1virtual_1view(JNIEnv *env, jclass clss, jlong dapl_id)
+{
+ H5D_vds_view_t virtual_view;
+
+ if (H5Pget_virtual_view((hid_t)dapl_id, &virtual_view) < 0)
+ h5libraryError(env);
+
+ return (jint)virtual_view;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1virtual_1view */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_virtual_view
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1virtual_1view(JNIEnv *env, jclass clss, jlong dapl_id, jint view)
+{
+ if (H5Pset_virtual_view((hid_t)dapl_id, (H5D_vds_view_t)view) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1virtual_1view */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_virtual_printf_gap
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1virtual_1printf_1gap(JNIEnv *env, jclass clss, jlong dapl_id)
+{
+ hsize_t gap_size;
+
+ if (H5Pget_virtual_printf_gap((hid_t)dapl_id, &gap_size) < 0)
+ h5libraryError(env);
+
+ return (jlong)gap_size;
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1virtual_1printf_1gap */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_virtual_printf_gap
+ * Signature: (JJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1virtual_1printf_1gap(JNIEnv *env, jclass clss, jlong dapl_id, jlong gap_size)
+{
+ if (H5Pset_virtual_printf_gap((hid_t)dapl_id, (hsize_t)gap_size) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1virtual_1printf_1gap */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_file_space
+ * Signature: (J[I[J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pget_1file_1space(JNIEnv *env, jclass clss, jlong fcpl_id, jintArray strategy, jlongArray threshold)
+{
+ herr_t status = -1;
+ jint *thestrategyArray = NULL;
+ jlong *thethresholdArray = NULL;
+ jboolean isCopy;
+
+ if (strategy) {
+ thestrategyArray = (jint*)ENVPTR->GetIntArrayElements(ENVPAR strategy, &isCopy);
+ if (thestrategyArray == NULL) {
+ h5JNIFatalError(env, "H5Pget_file_space: strategy not pinned");
+ return;
+ }
+ }
+
+ if (threshold) {
+ thethresholdArray = (jlong*)ENVPTR->GetLongArrayElements(ENVPAR threshold, &isCopy);
+ if (thethresholdArray == NULL) {
+ if (strategy) ENVPTR->ReleaseIntArrayElements(ENVPAR strategy, thestrategyArray, JNI_ABORT);
+ h5JNIFatalError(env, "H5Pget_file_space: threshold not pinned");
+ return;
+ } /* end if */
+ } /* end if */
+
+ status = H5Pget_file_space((hid_t)fcpl_id, (H5F_file_space_type_t*)thestrategyArray, (hsize_t*)thethresholdArray);
+
+ if (status < 0) {
+ if (strategy) ENVPTR->ReleaseIntArrayElements(ENVPAR strategy, thestrategyArray, JNI_ABORT);
+ if (threshold) ENVPTR->ReleaseLongArrayElements(ENVPAR threshold, thethresholdArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ if (strategy) ENVPTR->ReleaseIntArrayElements(ENVPAR strategy, thestrategyArray, 0);
+ if (threshold) ENVPTR->ReleaseLongArrayElements(ENVPAR threshold, thethresholdArray, 0);
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Pget_1file_1space */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_file_space
+ * Signature: (JIJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pset_1file_1space(JNIEnv *env, jclass clss, jlong fcpl_id, jint strategy, jlong threshold)
+{
+ if (H5Pset_file_space((hid_t)fcpl_id, (H5F_file_space_type_t)strategy, (hsize_t)threshold) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Pset_1file_1space */
+
+
+static herr_t
+H5P_cls_create_cb(hid_t prop_id, void *create_data)
+{
+ JNIEnv *cbenv;
+ jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jmethodID constructor;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) == 0) {
+ cls = CBENVPTR->GetObjectClass(CBENVPAR create_callback);
+ if (cls != 0) {
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(JLhdf/hdf5lib/callbacks/H5P_cls_create_func_t;)I");
+ if (mid != 0) {
+ /* fprintf(stderr, "JNI H5P_cls_create_func_cb execute\n"); fflush(stderr); */
+ status = CBENVPTR->CallIntMethod(CBENVPAR create_callback, mid, prop_id, create_data);
+ /* fprintf(stderr, "\nJNI H5P_cls_create_func_cb status: %d\n", status); fflush(stderr); */
+ } /* end if */
+ } /* end if */
+ } /* end if */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return status;
+} /* end H5P_cls_create_cb */
+
+static herr_t
+H5P_cls_copy_cb(hid_t new_prop_id, hid_t old_prop_id, void *copy_data)
+{
+ JNIEnv *cbenv;
+ jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jmethodID constructor;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) == 0) {
+ cls = CBENVPTR->GetObjectClass(CBENVPAR copy_callback);
+ if (cls != 0) {
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(JJLhdf/hdf5lib/callbacks/H5P_cls_copy_func_t;)I");
+ if (mid != 0) {
+ status = CBENVPTR->CallIntMethod(CBENVPAR copy_callback, mid, new_prop_id, old_prop_id, copy_data);
+ } /* end if */
+ } /* end if */
+ } /* end if */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return status;
+} /* end H5P_cls_copy_cb */
+
+static herr_t
+H5P_cls_close_cb(hid_t prop_id, void *close_data)
+{
+ JNIEnv *cbenv;
+ jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jmethodID constructor;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) == 0) {
+ cls = CBENVPTR->GetObjectClass(CBENVPAR close_callback);
+ if (cls != 0) {
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(JLhdf/hdf5lib/callbacks/H5P_cls_close_func_t;)I");
+ if (mid != 0) {
+ status = CBENVPTR->CallIntMethod(CBENVPAR close_callback, mid, prop_id, close_data);
+ } /* end if */
+ } /* end if */
+ } /* end if */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return status;
+} /* end H5P_cls_close_cb */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Pcreate_class_nocb
+ * Signature: (JLjava/lang/String;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Pcreate_1class_1nocb(JNIEnv *env, jclass clss, jlong parent_class, jstring name)
+{
+ hid_t class_id = -1;
+ const char *cstr;
+
+ PIN_JAVA_STRING(name, cstr, -1);
+
+ class_id = H5Pcreate_class((hid_t)parent_class, cstr, NULL, NULL, NULL, NULL, NULL, NULL);
+
+ UNPIN_JAVA_STRING(name, cstr);
+
+ if (class_id < 0)
+ h5libraryError(env);
+
+ return (jlong)class_id;
+} /* end Java_hdf_hdf5lib_H5__1H5Pcreate_1class_1nocb */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Pcreate_class
+ * Signature: (JLjava/lang/String;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Pcreate_1class(JNIEnv *env, jclass clss, jlong parent_class, jstring name, jobject create_op,
+ jobject create_data, jobject copy_op, jobject copy_data, jobject close_op, jobject close_data)
+{
+ hid_t class_id = -1;
+ const char *cstr;
+ copy_callback = copy_op;
+ close_callback = close_op;
+ create_callback = create_op;
+
+ PIN_JAVA_STRING(name, cstr, -1);
+
+ class_id = H5Pcreate_class((hid_t)parent_class, cstr, (H5P_cls_create_func_t)H5P_cls_create_cb, (void*) create_data,
+ (H5P_cls_copy_func_t)H5P_cls_copy_cb, (void*) copy_data, (H5P_cls_close_func_t)H5P_cls_close_cb, (void*) close_data);
+
+ UNPIN_JAVA_STRING(name, cstr);
+
+ if (class_id < 0)
+ h5libraryError(env);
+
+ return (jlong)class_id;
+} /* end Java_hdf_hdf5lib_H5__1H5Pcreate_1class */
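+
+/*
+ * Note: the create/copy/close callback objects passed in above are cached
+ * in the shared create_callback, copy_callback and close_callback
+ * variables that the H5P_cls_*_cb bridges read back later. As written,
+ * only the callbacks of the most recently created class are retained, the
+ * cached references are assumed to stay valid for the lifetime of that
+ * class, and concurrent registration from multiple threads is not expected
+ * to be safe.
+ */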
+
+static herr_t
+H5P_prp_create_cb(const char *name, size_t size, void *value)
+{
+ JNIEnv *cbenv;
+ jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jmethodID constructor;
+ jstring str;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) == 0) {
+ cls = CBENVPTR->GetObjectClass(CBENVPAR create_callback);
+ if (cls != 0) {
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(Ljava/lang/String;J[B)I");
+ if (mid != 0) {
+ str = CBENVPTR->NewStringUTF(CBENVPAR name);
+ status = CBENVPTR->CallIntMethod(CBENVPAR create_callback, mid, str, size, value);
+ } /* end if */
+ } /* end if */
+ } /* end if */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return status;
+} /* end H5P_prp_create_cb */
+
+static herr_t
+H5P_prp_copy_cb(const char *name, size_t size, void *value)
+{
+ JNIEnv *cbenv;
+ jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jmethodID constructor;
+ jstring str;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) == 0) {
+ cls = CBENVPTR->GetObjectClass(CBENVPAR copy_callback);
+ if (cls != 0) {
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(Ljava/lang/String;J[B)I");
+ if (mid != 0) {
+ str = CBENVPTR->NewStringUTF(CBENVPAR name);
+ status = CBENVPTR->CallIntMethod(CBENVPAR copy_callback, mid, str, size, value);
+ } /* end if */
+ } /* end if */
+ } /* end if */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return status;
+} /* end H5P_prp_copy_cb */
+
+static herr_t
+H5P_prp_close_cb(const char *name, size_t size, void *value)
+{
+ JNIEnv *cbenv;
+ jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jmethodID constructor;
+ jstring str;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) == 0) {
+ cls = CBENVPTR->GetObjectClass(CBENVPAR close_callback);
+ if (cls != 0) {
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(Ljava/lang/String;J[B)I");
+ if (mid != 0) {
+ str = CBENVPTR->NewStringUTF(CBENVPAR name);
+ status = CBENVPTR->CallIntMethod(CBENVPAR close_callback, mid, str, size, value);
+ } /* end if */
+ } /* end if */
+ } /* end if */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return status;
+} /* end H5P_prp_close_cb */
+
+static int
+H5P_prp_compare_cb(void *value1, void *value2, size_t size)
+{
+ JNIEnv *cbenv;
+ jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jmethodID constructor;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) == 0) {
+ cls = CBENVPTR->GetObjectClass(CBENVPAR compare_callback);
+ if (cls != 0) {
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "([B[BJ)I");
+ if (mid != 0) {
+ status = CBENVPTR->CallIntMethod(CBENVPAR compare_callback, mid, value1, value2, size);
+ } /* end if */
+ } /* end if */
+ } /* end if */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return status;
+} /* end H5P_prp_compare_cb */
+
+static herr_t
+H5P_prp_get_cb(hid_t prop_id, const char *name, size_t size, void *value)
+{
+ JNIEnv *cbenv;
+ jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jmethodID constructor;
+ jstring str;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) == 0) {
+ cls = CBENVPTR->GetObjectClass(CBENVPAR get_callback);
+ if (cls != 0) {
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(JLjava/lang/String;J[B)I");
+ if (mid != 0) {
+ str = CBENVPTR->NewStringUTF(CBENVPAR name);
+ status = CBENVPTR->CallIntMethod(CBENVPAR get_callback, mid, prop_id, str, size, value);
+ } /* end if */
+ } /* end if */
+ } /* end if */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return status;
+} /* end H5P_prp_get_cb */
+
+static herr_t
+H5P_prp_set_cb(hid_t prop_id, const char *name, size_t size, void *value)
+{
+ JNIEnv *cbenv;
+ jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jmethodID constructor;
+ jstring str;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) == 0) {
+ cls = CBENVPTR->GetObjectClass(CBENVPAR set_callback);
+ if (cls != 0) {
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(JLjava/lang/String;J[B)I");
+ if (mid != 0) {
+ str = CBENVPTR->NewStringUTF(CBENVPAR name);
+ status = CBENVPTR->CallIntMethod(CBENVPAR set_callback, mid, prop_id, str, size, value);
+ } /* end if */
+ } /* end if */
+ } /* end if */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return status;
+} /* end H5P_prp_set_cb */
+
+static herr_t
+H5P_prp_delete_cb(hid_t prop_id, const char *name, size_t size, void *value)
+{
+ JNIEnv *cbenv;
+ jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jmethodID constructor;
+ jstring str;
+
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) == 0) {
+ cls = CBENVPTR->GetObjectClass(CBENVPAR delete_callback);
+ if (cls != 0) {
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(JLjava/lang/String;J[B)I");
+ if (mid != 0) {
+ str = CBENVPTR->NewStringUTF(CBENVPAR name);
+ status = CBENVPTR->CallIntMethod(CBENVPAR delete_callback, mid, prop_id, str, size, value);
+ } /* end if */
+ } /* end if */
+ } /* end if */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ return status;
+} /* end H5P_prp_delete_cb */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pregister2_nocb
+ * Signature: (JLjava/lang/String;J[B)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pregister2_1nocb(JNIEnv *env, jclass clss, jlong cls_id, jstring name, jlong prp_size, jbyteArray def_value)
+{
+ herr_t status = -1;
+ jbyte *buffP;
+ jboolean isCopy2;
+ const char *cstr;
+
+ PIN_JAVA_STRING0(name, cstr);
+ buffP = ENVPTR->GetByteArrayElements(ENVPAR def_value, &isCopy2);
+ if (buffP == NULL) {
+ UNPIN_JAVA_STRING(name, cstr);
+ h5JNIFatalError(env, "H5Pregister2: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Pregister2((hid_t)cls_id, cstr, (size_t)prp_size, (void*)buffP, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+
+ UNPIN_JAVA_STRING(name, cstr);
+ if (status < 0) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR def_value, buffP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR def_value, buffP, 0);
+ } /* end else */
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Pregister2_1nocb */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pregister2
+ * Signature: (JLjava/lang/String;J[BLjava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pregister2(JNIEnv *env, jclass clss, jlong cls_id, jstring name, jlong prp_size,
+ jbyteArray def_value, jobject prp_create, jobject prp_set, jobject prp_get, jobject prp_delete,
+ jobject prp_copy, jobject prp_cmp, jobject prp_close)
+{
+ herr_t status = -1;
+ jbyte *buffP;
+ jboolean isCopy2;
+ const char *cstr;
+ copy_callback = prp_copy;
+ close_callback = prp_close;
+ create_callback = prp_create;
+ compare_callback = prp_cmp;
+ set_callback = prp_set;
+ get_callback = prp_get;
+ delete_callback = prp_delete;
+
+ PIN_JAVA_STRING0(name, cstr);
+ buffP = ENVPTR->GetByteArrayElements(ENVPAR def_value, &isCopy2);
+ if (buffP == NULL) {
+ UNPIN_JAVA_STRING(name, cstr);
+ h5JNIFatalError(env, "H5Pregister2: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Pregister2((hid_t)cls_id, cstr, (size_t)prp_size, (void*)buffP, (H5P_prp_create_func_t)H5P_prp_create_cb,
+ (H5P_prp_set_func_t)H5P_prp_set_cb, (H5P_prp_get_func_t)H5P_prp_get_cb, (H5P_prp_delete_func_t)H5P_prp_delete_cb,
+ (H5P_prp_copy_func_t)H5P_prp_copy_cb, (H5P_prp_compare_func_t)H5P_prp_compare_cb, (H5P_prp_close_func_t)H5P_prp_close_cb);
+
+ UNPIN_JAVA_STRING(name, cstr);
+ if (status < 0) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR def_value, buffP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR def_value, buffP, 0);
+ } /* end else */
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Pregister2 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pinsert2_nocb
+ * Signature: (JLjava/lang/String;J[B)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pinsert2_1nocb(JNIEnv *env, jclass clss, jlong cls_id, jstring name, jlong prp_size, jbyteArray def_value)
+{
+ herr_t status = -1;
+ jbyte *buffP;
+ jboolean isCopy2;
+ const char *cstr;
+
+ PIN_JAVA_STRING0(name, cstr);
+ buffP = ENVPTR->GetByteArrayElements(ENVPAR def_value, &isCopy2);
+ if (buffP == NULL) {
+ UNPIN_JAVA_STRING(name, cstr);
+ h5JNIFatalError(env, "H5Pinsert2: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Pinsert2((hid_t)cls_id, cstr, (size_t)prp_size, (void*)buffP, NULL, NULL, NULL, NULL, NULL, NULL);
+
+ UNPIN_JAVA_STRING(name, cstr);
+ if (status < 0) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR def_value, buffP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR def_value, buffP, 0);
+ } /* end else */
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Pinsert2_1nocb */
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pinsert2
+ * Signature: (JLjava/lang/String;J[BLjava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Pinsert2(JNIEnv *env, jclass clss, jlong cls_id, jstring name, jlong prp_size,
+ jbyteArray def_value, jobject prp_set, jobject prp_get, jobject prp_delete,
+ jobject prp_copy, jobject prp_cmp, jobject prp_close)
+{
+ herr_t status = -1;
+ jbyte *buffP;
+ jboolean isCopy2;
+ const char *cstr;
+ copy_callback = prp_copy;
+ close_callback = prp_close;
+ compare_callback = prp_cmp;
+ set_callback = prp_set;
+ get_callback = prp_get;
+ delete_callback = prp_delete;
+
+ PIN_JAVA_STRING0(name, cstr);
+ buffP = ENVPTR->GetByteArrayElements(ENVPAR def_value, &isCopy2);
+ if (buffP == NULL) {
+ UNPIN_JAVA_STRING(name, cstr);
+ h5JNIFatalError(env, "H5Pinsert2: buf not pinned");
+ } /* end if */
+ else {
+ status = H5Pinsert2((hid_t)cls_id, cstr, (size_t)prp_size, (void*)buffP,
+ (H5P_prp_set_func_t)H5P_prp_set_cb, (H5P_prp_get_func_t)H5P_prp_get_cb, (H5P_prp_delete_func_t)H5P_prp_delete_cb,
+ (H5P_prp_copy_func_t)H5P_prp_copy_cb, (H5P_prp_compare_func_t)H5P_prp_compare_cb, (H5P_prp_close_func_t)H5P_prp_close_cb);
+
+ UNPIN_JAVA_STRING(name, cstr);
+ if (status < 0) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR def_value, buffP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR def_value, buffP, 0);
+ } /* end else */
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Pinsert2 */
+
+static herr_t
+H5P_iterate_cb(hid_t prop_id, const char *name, void *op_data)
+{
+ JNIEnv *cbenv;
+ jint status = -1;
+ jclass cls;
+ jmethodID mid;
+ jstring str;
+ jmethodID constructor;
+
+ /* fprintf(stderr, "\nJNI H5P_iterate_cb entered\n"); fflush(stderr); */
+ if(JVMPTR->AttachCurrentThread(JVMPAR2 (void**)&cbenv, NULL) != 0) {
+ /* fprintf(stderr, "\nJNI H5P_iterate_cb error: AttachCurrentThread failed\n"); fflush(stderr); */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ } /* end if */
+ else {
+ cls = CBENVPTR->GetObjectClass(CBENVPAR visit_callback);
+ if (cls == 0) {
+ /* fprintf(stderr, "\nJNI H5P_iterate_cb error: GetObjectClass failed\n"); fflush(stderr); */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ } /* end if */
+ else {
+ mid = CBENVPTR->GetMethodID(CBENVPAR cls, "callback", "(JLjava/lang/String;Lhdf/hdf5lib/callbacks/H5P_iterate_t;)I");
+ if (mid == 0) {
+ /* fprintf(stderr, "\nJNI H5P_iterate_cb error: GetMethodID failed\n"); fflush(stderr); */
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ } /* end if */
+ else {
+ str = CBENVPTR->NewStringUTF(CBENVPAR name);
+
+ /* fprintf(stderr, "JNI H5P_iterate_cb execute\n"); fflush(stderr); */
+ status = CBENVPTR->CallIntMethod(CBENVPAR visit_callback, mid, prop_id, str, op_data);
+ /* fprintf(stderr, "\nJNI H5P_iterate_cb status: %d\n", status); fflush(stderr); */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ JVMPTR->DetachCurrentThread(JVMPAR);
+ /* fprintf(stderr, "\nJNI H5P_iterate_cb leave\n"); fflush(stderr); */
+
+ return status;
+} /* end H5P_iterate_cb */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Piterate
+ * Signature: (J[ILjava/lang/Object;Ljava/lang/Object;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Piterate(JNIEnv *env, jclass clss, jlong prop_id, jintArray idx,
+ jobject callback_op, jobject op_data)
+{
+ herr_t status = -1;
+ jint *theArray = NULL;
+ jboolean isCopy;
+
+ ENVPTR->GetJavaVM(ENVPAR &jvm);
+ visit_callback = callback_op;
+
+ if (op_data == NULL) {
+ h5nullArgument(env, "H5Piterate: op_data is NULL");
+ } /* end if */
+ else if (callback_op == NULL) {
+ h5nullArgument(env, "H5Piterate: callback_op is NULL");
+ } /* end else if */
+ else {
+ if (idx == NULL) {
+ status = H5Piterate((hid_t)prop_id, NULL, (H5P_iterate_t)H5P_iterate_cb, (void*)op_data);
+ } /* end if */
+ else {
+ theArray = (jint *)ENVPTR->GetIntArrayElements(ENVPAR idx, &isCopy);
+ if (theArray == NULL) {
+ h5JNIFatalError(env, "H5Piterate: idx not pinned");
+ return -1;
+ } /* end if */
+ status = H5Piterate((hid_t)prop_id, (int*)&theArray[0], (H5P_iterate_t)H5P_iterate_cb, (void*)op_data);
+ } /* end else */
+
+ if (status < 0) {
+ if(idx)
+ ENVPTR->ReleaseIntArrayElements(ENVPAR idx, theArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else if (idx)
+ ENVPTR->ReleaseIntArrayElements(ENVPAR idx, theArray, 0);
+ } /* end else */
+
+ return status;
+} /* end Java_hdf_hdf5lib_H5_H5Piterate */
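+
+/*
+ * Java-side contract, derived from the GetMethodID lookup in H5P_iterate_cb
+ * above: callback_op must expose
+ *
+ *     int callback(long plist, String name, H5P_iterate_t op_data)
+ *
+ * which returns zero to continue the iteration, a positive value to stop
+ * early, or a negative value to signal an error. A minimal sketch, assuming
+ * an hdf.hdf5lib.callbacks interface with that single method:
+ *
+ *     H5P_iterate_cb cb = (plist, name, op_data) -> {
+ *         System.out.println(name);
+ *         return 0;
+ *     };
+ *     H5.H5Piterate(prop_id, null, cb, op_data);
+ */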
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
diff --git a/java/src/jni/h5pImp.h b/java/src/jni/h5pImp.h
new file mode 100644
index 0000000..66488c2
--- /dev/null
+++ b/java/src/jni/h5pImp.h
@@ -0,0 +1,1313 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5P */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5P
+#define _Included_hdf_hdf5lib_H5_H5P
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pcreate
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Pcreate
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5__1H5Pclose
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_class
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1class
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pcopy
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Pcopy
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_version
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1version
+(JNIEnv *, jclass, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_userblock
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1userblock
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_userblock
+ * Signature: (J[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1userblock
+(JNIEnv *, jclass, jlong, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_sizes
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1sizes
+(JNIEnv *, jclass, jlong, jint, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_sizes
+ * Signature: (J[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1sizes
+(JNIEnv *, jclass, jlong, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_sym_k
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1sym_1k
+(JNIEnv *, jclass, jlong, jint, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_sym_k
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1sym_1k
+(JNIEnv *, jclass, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_istore_k
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1istore_1k
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_istore_k
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1istore_1k
+(JNIEnv *, jclass, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_layout
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1layout
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_layout
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1layout
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_chunk
+ * Signature: (JI[B)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1chunk
+(JNIEnv *, jclass, jlong, jint, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_chunk
+ * Signature: (JI[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1chunk
+(JNIEnv *, jclass, jlong, jint, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_alignment
+ * Signature: (JJJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1alignment
+(JNIEnv *, jclass, jlong, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_alignment
+ * Signature: (J[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1alignment
+(JNIEnv *, jclass, jlong, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_external
+ * Signature: (JLjava/lang/String;JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1external
+(JNIEnv *, jclass, jlong, jstring, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_external_count
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1external_1count
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_external
+ * Signature: (JIJ[Ljava/lang/String;[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1external
+(JNIEnv *, jclass, jlong, jint, jlong, jobjectArray, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fill_value
+ * Signature: (JJ[B)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1fill_1value
+(JNIEnv *, jclass, jlong, jlong, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_fill_value
+ * Signature: (JJ[B)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1fill_1value
+(JNIEnv *, jclass, jlong, jlong, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_filter
+ * Signature: (JIIJ[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1filter
+(JNIEnv *, jclass, jlong, jint, jint, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_nfilters
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1nfilters
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_filter
+ * Signature: (JI[I[J[IJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1filter
+(JNIEnv *, jclass, jlong, jint, jintArray, jlongArray, jintArray, jlong, jobjectArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_driver
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1driver
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_cache
+ * Signature: (JIJJD)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1cache
+(JNIEnv *, jclass, jlong, jint, jlong, jlong, jdouble);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_cache
+ * Signature: (J[I[J[J[D)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1cache
+(JNIEnv *, jclass, jlong, jintArray, jlongArray, jlongArray, jdoubleArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_buffer
+ * Signature: (JJ[B[B)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1buffer
+(JNIEnv *, jclass, jlong, jlong, jbyteArray, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_buffer
+ * Signature: (J[B[B)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1buffer
+(JNIEnv *, jclass, jlong, jbyteArray, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_buffer_size
+ * Signature: (JJ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1buffer_1size
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_buffer_size
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1buffer_1size
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_preserve
+ * Signature: (JZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1preserve
+(JNIEnv *, jclass, jlong, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_preserve
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1preserve
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_deflate
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1deflate
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_gc_references
+ * Signature: (JZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1gc_1references
+(JNIEnv *, jclass, jlong, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_gc_references
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Pget_1gc_1references
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_btree_ratios
+ * Signature: (JDDD)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1btree_1ratios
+(JNIEnv *, jclass, jlong, jdouble, jdouble, jdouble);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_btree_ratios
+ * Signature: (J[D[D[D)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1btree_1ratios
+(JNIEnv *, jclass, jlong, jdoubleArray, jdoubleArray, jdoubleArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_small_data_block_size
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1small_1data_1block_1size
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_small_data_block_size
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1small_1data_1block_1size
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_alloc_time
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1alloc_1time
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_alloc_time
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1alloc_1time
+(JNIEnv *, jclass, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fill_time
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1fill_1time
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_fill_time
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1fill_1time
+(JNIEnv *, jclass, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pfill_value_defined
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pfill_1value_1defined
+(JNIEnv *, jclass, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fletcher32
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1fletcher32
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_edc_check
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1edc_1check
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_edc_check
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1edc_1check
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_shuffle
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1shuffle
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_szip
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1szip
+(JNIEnv *, jclass, jlong, jint, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_hyper_vector_size
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1hyper_1vector_1size
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_hyper_vector_size
+ * Signature: (J[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1hyper_1vector_1size
+(JNIEnv *, jclass, jlong, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pall_filters_avail
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Pall_1filters_1avail
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pmodify_filter
+ * Signature: (JIIJ[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pmodify_1filter
+(JNIEnv *, jclass, jlong, jint, jint, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_filter_by_id
+ * Signature: (JI[I[J[IJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1filter_1by_1id
+(JNIEnv *, jclass, jlong, jint, jintArray, jlongArray, jintArray, jlong, jobjectArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fclose_degree
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1fclose_1degree
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_fclose_degree
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1fclose_1degree
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_family
+ * Signature: (JJJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1fapl_1family
+(JNIEnv *, jclass, jlong, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_fapl_family
+ * Signature: (J[J[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1fapl_1family
+(JNIEnv *, jclass, jlong, jlongArray, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_core
+ * Signature: (JJZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1fapl_1core
+(JNIEnv *, jclass, jlong, jlong, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_fapl_core
+ * Signature: (J[J[Z)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1fapl_1core
+(JNIEnv *, jclass, jlong, jlongArray, jbooleanArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_family_offset
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1family_1offset
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_family_offset
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1family_1offset
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_log
+ * Signature: (JLjava/lang/String;JJ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1fapl_1log
+(JNIEnv *, jclass, jlong, jstring, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Premove_filter
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5P1remove_1filter
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset
+ * Signature: (JLjava/lang/String;I)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pset
+(JNIEnv *, jclass, jlong, jstring, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pexist
+ * Signature: (JLjava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Pexist
+(JNIEnv *, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_size
+ * Signature: (JLjava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1size
+(JNIEnv *, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_nprops
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1nprops
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_class_name
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_hdf_hdf5lib_H5_H5Pget_1class_1name
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_class_parent
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1class_1parent
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pisa_class
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pisa_1class
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget
+(JNIEnv *, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pequal
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pequal
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pcopy_prop
+ * Signature: (JJLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pcopy_1prop
+(JNIEnv *, jclass, jlong, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Premove
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Premove
+(JNIEnv *, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Punregister
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Punregister
+(JNIEnv *, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Pclose_class
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5__1H5Pclose_1class
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_filter2
+ * Signature: (JI[I[J[IJ[Ljava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1filter2
+(JNIEnv *, jclass, jlong, jint, jintArray, jlongArray, jintArray, jlong, jobjectArray, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_filter_by_id2
+ * Signature: (JI[I[J[IJ[Ljava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1filter_1by_1id2
+(JNIEnv *, jclass, jlong, jint, jintArray, jlongArray, jintArray, jlong, jobjectArray, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_nlinks
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1nlinks
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_nlinks
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1nlinks
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_libver_bounds
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1libver_1bounds
+(JNIEnv *, jclass, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_libver_bounds
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1libver_1bounds
+(JNIEnv *, jclass, jlong, jint, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_link_creation_order
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1link_1creation_1order
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_link_creation_order
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1link_1creation_1order
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_attr_creation_order
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1attr_1creation_1order
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_attr_creation_order
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1attr_1creation_1order
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_copy_object
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1copy_1object
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_copy_object
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1copy_1object
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_create_intermediate_group
+ * Signature: (JZ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1create_1intermediate_1group
+(JNIEnv *, jclass, jlong, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_create_intermediate_group
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Pget_1create_1intermediate_1group
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_data_transform
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1data_1transform
+(JNIEnv *, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_data_transform
+ * Signature: (J[Ljava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1data_1transform
+(JNIEnv *, jclass, jlong, jobjectArray, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_elink_acc_flags
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1elink_1acc_1flags
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_elink_acc_flags
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1elink_1acc_1flags
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_link_phase_change
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1link_1phase_1change
+(JNIEnv *, jclass, jlong, jint, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_link_phase_change
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1link_1phase_1change
+(JNIEnv *, jclass, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_attr_phase_change
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1attr_1phase_1change
+(JNIEnv *, jclass, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_attr_phase_change
+ * Signature: (JII)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1attr_1phase_1change
+(JNIEnv *, jclass, jlong, jint, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_shared_mesg_phase_change
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1phase_1change
+(JNIEnv *, jclass, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_shared_mesg_phase_change
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1phase_1change
+(JNIEnv *, jclass, jlong, jint, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_shared_mesg_nindexes
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1nindexes
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_shared_mesg_nindexes
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1nindexes
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_shared_mesg_index
+ * Signature: (JIII)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1shared_1mesg_1index
+(JNIEnv *, jclass, jlong, jint, jint, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_shared_mesg_index
+ * Signature: (JI[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1shared_1mesg_1index
+(JNIEnv *, jclass, jlong, jint, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_local_heap_size_hint
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1local_1heap_1size_1hint
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_local_heap_size_hint
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1local_1heap_1size_1hint
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_nbit
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1nbit
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_scaleoffset
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1scaleoffset
+(JNIEnv *, jclass, jlong, jint, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_est_link_info
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1est_1link_1info
+(JNIEnv *, jclass, jlong, jint, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_est_link_info
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1est_1link_1info
+(JNIEnv *, jclass, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_elink_fapl
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1elink_1fapl
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Pget_elink_fapl
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Pget_1elink_1fapl
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_elink_prefix
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1elink_1prefix
+(JNIEnv *, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_elink_prefix
+ * Signature: (J[Ljava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1elink_1prefix
+(JNIEnv *, jclass, jlong, jobjectArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_direct
+ * Signature: (JJJJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1fapl_1direct
+(JNIEnv *, jclass, jlong, jlong, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_fapl_direct
+ * Signature: (J[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1fapl_1direct
+(JNIEnv *, jclass, jlong, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_sec2
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1fapl_1sec2
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_stdio
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1fapl_1stdio
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_windows
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pset_1fapl_1windows
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method:    H5Pget_fapl_multi
+ * Signature: (J[I[J[Ljava/lang/String;[J)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Pget_1fapl_1multi
+(JNIEnv *, jclass, jlong, jintArray, jlongArray, jobjectArray, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method:    H5Pset_fapl_multi
+ * Signature: (J[I[J[Ljava/lang/String;[JZ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1fapl_1multi
+(JNIEnv *, jclass, jlong, jintArray, jlongArray, jobjectArray, jlongArray, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_fapl_split
+ * Signature: (JLjava/lang/String;JLjava/lang/String;J)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1fapl_1split
+(JNIEnv *, jclass, jlong, jstring, jlong, jstring, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_meta_block_size
+ * Signature: (JJ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1meta_1block_1size
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_meta_block_size
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1meta_1block_1size
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_sieve_buf_size
+ * Signature: (JJ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1sieve_1buf_1size
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_sieve_buf_size
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1sieve_1buf_1size
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_elink_file_cache_size
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1elink_1file_1cache_1size
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_elink_file_cache_size
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1elink_1file_1cache_1size
+(JNIEnv *, jclass, jlong);
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_mdc_config
+ * Signature: (J)Lhdf/hdf5lib/structs/H5AC_cache_config_t;
+ */
+JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Pget_1mdc_1config
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_mdc_config
+ * Signature: (JLhdf/hdf5lib/structs/H5AC_cache_config_t;)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1mdc_1config
+(JNIEnv *, jclass, jlong, jobject);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_chunk_cache
+ * Signature: (JJJD)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1chunk_1cache
+(JNIEnv *, jclass, jlong, jlong, jlong, jdouble);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_chunk_cache
+ * Signature: (J[J[J[D)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pget_1chunk_1cache
+(JNIEnv *, jclass, jlong, jlongArray, jlongArray, jdoubleArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_obj_track_times
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Pget_1obj_1track_1times
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_obj_track_times
+ * Signature: (JZ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1obj_1track_1times
+(JNIEnv *, jclass, jlong, jboolean);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_char_encoding
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1char_1encoding
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_char_encoding
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1char_1encoding
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_virtual
+ * Signature: (JJLjava/lang/String;Ljava/lang/String;J)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1virtual
+(JNIEnv *, jclass, jlong, jlong, jstring, jstring, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_virtual_count
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1virtual_1count
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_virtual_vspace
+ * Signature: (JJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1virtual_1vspace
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_virtual_srcspace
+ * Signature: (JJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1virtual_1srcspace
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_virtual_filename
+ * Signature: (JJ)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_hdf_hdf5lib_H5_H5Pget_1virtual_1filename
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_virtual_dsetname
+ * Signature: (JJ)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_hdf_hdf5lib_H5_H5Pget_1virtual_1dsetname
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_virtual_view
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Pget_1virtual_1view
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_virtual_view
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1virtual_1view
+(JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_virtual_printf_gap
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Pget_1virtual_1printf_1gap
+(JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_virtual_printf_gap
+ * Signature: (JJ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1virtual_1printf_1gap
+(JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pget_file_space
+ * Signature: (J[I[J)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pget_1file_1space
+(JNIEnv *, jclass, jlong, jintArray, jlongArray);
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pset_file_space
+ * Signature: (JIJ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pset_1file_1space
+(JNIEnv *, jclass, jlong, jint, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Pcreate_class_nocb
+ * Signature: (JLjava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Pcreate_1class_1nocb
+ (JNIEnv*, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Pcreate_class
+ * Signature: (JLjava/lang/String;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Pcreate_1class
+ (JNIEnv*, jclass, jlong, jstring, jobject, jobject, jobject, jobject, jobject, jobject);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pregister2_nocb
+ * Signature: (JLjava/lang/String;J[B)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pregister2_1nocb
+ (JNIEnv*, jclass, jlong, jstring, jlong, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pregister2
+ * Signature: (JLjava/lang/String;J[BLjava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pregister2
+ (JNIEnv*, jclass, jlong, jstring, jlong, jbyteArray, jobject, jobject, jobject, jobject, jobject, jobject, jobject);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pinsert2_nocb
+ * Signature: (JLjava/lang/String;J[B)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pinsert2_1nocb
+ (JNIEnv*, jclass, jlong, jstring, jlong, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Pinsert2
+ * Signature: (JLjava/lang/String;J[BLjava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Pinsert2
+ (JNIEnv*, jclass, jlong, jstring, jlong, jbyteArray, jobject, jobject, jobject, jobject, jobject, jobject);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Piterate
+ * Signature: (J[ILjava/lang/Object;Ljava/lang/Object;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Piterate
+ (JNIEnv*, jclass, jlong, jintArray, jobject, jobject);
+
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5P */
diff --git a/java/src/jni/h5plImp.c b/java/src/jni/h5plImp.c
new file mode 100644
index 0000000..dfaeb53
--- /dev/null
+++ b/java/src/jni/h5plImp.c
@@ -0,0 +1,64 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ *     http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include "hdf5.h"
+#include <stdlib.h>
+#include "h5jni.h"
+#include "h5plImp.h"
+
+extern JavaVM *jvm;
+extern jobject visit_callback;
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5PLset_loading_state
+ * Signature: (I)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5PLset_1loading_1state
+ (JNIEnv *env, jclass clss, jint plugin_flags)
+{
+ if (H5PLset_loading_state((unsigned int)plugin_flags) < 0) {
+ h5libraryError(env);
+ }
+}
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5PLget_loading_state
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5PLget_1loading_1state
+ (JNIEnv *env, jclass clss)
+{
+ unsigned int plugin_type = 0;
+ if (H5PLget_loading_state(&plugin_type) < 0) {
+ h5libraryError(env);
+ }
+ return (jint)plugin_type;
+}
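+
+/*
+ * Illustrative sketch of the underlying C calls these two wrappers forward to
+ * (assumes the standard H5PL_* flag macros from H5PLpublic.h): enable every
+ * plugin type, then read the mask back:
+ *
+ *     unsigned int state = 0;
+ *     H5PLset_loading_state(H5PL_ALL_PLUGIN);
+ *     H5PLget_loading_state(&state);
+ */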
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
diff --git a/java/src/jni/h5plImp.h b/java/src/jni/h5plImp.h
new file mode 100644
index 0000000..7c55bf4
--- /dev/null
+++ b/java/src/jni/h5plImp.h
@@ -0,0 +1,46 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5PL */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5PL
+#define _Included_hdf_hdf5lib_H5_H5PL
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5PLset_loading_state
+ * Signature: (I)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5PLset_1loading_1state
+ (JNIEnv *, jclass, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5PLget_loading_state
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5PLget_1loading_1state
+ (JNIEnv *, jclass);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5PL */
diff --git a/java/src/jni/h5rImp.c b/java/src/jni/h5rImp.c
new file mode 100644
index 0000000..9b19bfb
--- /dev/null
+++ b/java/src/jni/h5rImp.c
@@ -0,0 +1,324 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ *     http://hdfgroup.org/HDF5/doc/
+ *
+ */
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include "hdf5.h"
+#include <jni.h>
+#include <stdlib.h>
+#include "h5jni.h"
+#include "h5rImp.h"
+
+extern JavaVM *jvm;
+extern jobject visit_callback;
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Rcreate
+ * Signature: ([BJLjava/lang/String;IJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Rcreate(JNIEnv *env, jclass clss, jbyteArray ref, jlong loc_id, jstring name, jint ref_type, jlong space_id)
+{
+ const char *rName;
+ herr_t status = -1;
+ jbyte *refP;
+ jboolean isCopy2;
+
+ PIN_JAVA_STRING(name, rName, -1);
+
+ if (ref == NULL) {
+ UNPIN_JAVA_STRING(name, rName);
+ h5nullArgument( env, "H5Rcreate: ref is NULL");
+ } /* end if */
+ else {
+ if ((ref_type == H5R_OBJECT) && ENVPTR->GetArrayLength(ENVPAR ref) != H5R_OBJ_REF_BUF_SIZE) {
+ UNPIN_JAVA_STRING(name, rName);
+ h5badArgument( env, "H5Rcreate: ref input array != H5R_OBJ_REF_BUF_SIZE");
+ } /* end if */
+ else if ((ref_type == H5R_DATASET_REGION) && ENVPTR->GetArrayLength(ENVPAR ref) != H5R_DSET_REG_REF_BUF_SIZE) {
+ UNPIN_JAVA_STRING(name, rName);
+ h5badArgument( env, "H5Rcreate: region ref input array != H5R_DSET_REG_REF_BUF_SIZE");
+ } /* end else if */
+ else if ((ref_type != H5R_OBJECT) && (ref_type != H5R_DATASET_REGION)) {
+ UNPIN_JAVA_STRING(name, rName);
+ h5badArgument( env, "H5Rcreate: ref_type unknown type ");
+ } /* end else if */
+ else {
+ refP = (jbyte*)ENVPTR->GetByteArrayElements(ENVPAR ref, &isCopy2);
+ if (refP == NULL) {
+ UNPIN_JAVA_STRING(name, rName);
+ h5JNIFatalError(env, "H5Rcreate: ref not pinned");
+ } /* end if */
+ else {
+ status = H5Rcreate(refP, (hid_t)loc_id, rName, (H5R_type_t)ref_type, (hid_t)space_id);
+
+ UNPIN_JAVA_STRING(name, rName);
+ if (status < 0) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Rcreate */
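+
+/*
+ * Illustrative sketch of the C-level call made above once the Java byte[] has
+ * been pinned (hypothetical file_id; error checks omitted).  The length checks
+ * exist because the Java side must pass a buffer of exactly
+ * H5R_OBJ_REF_BUF_SIZE bytes (H5R_DSET_REG_REF_BUF_SIZE for region refs):
+ *
+ *     unsigned char obj_ref[H5R_OBJ_REF_BUF_SIZE];
+ *     H5Rcreate(obj_ref, file_id, "/group/dataset", H5R_OBJECT, -1);
+ */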
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Rdereference
+ * Signature: (JJI[B)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Rdereference(JNIEnv *env, jclass clss, jlong dataset, jlong access_list, jint ref_type, jbyteArray ref)
+{
+ jboolean isCopy;
+ jbyte *refP;
+ hid_t retVal = -1;
+
+ if (ref == NULL) {
+ h5nullArgument( env, "H5Rdereference: ref is NULL");
+ } /* end if */
+ else if ((ref_type == H5R_OBJECT) && ENVPTR->GetArrayLength(ENVPAR ref) != H5R_OBJ_REF_BUF_SIZE) {
+ h5badArgument( env, "H5Rdereference: obj ref input array != H5R_OBJ_REF_BUF_SIZE");
+ } /* end else if */
+ else if ((ref_type == H5R_DATASET_REGION) && ENVPTR->GetArrayLength(ENVPAR ref) != H5R_DSET_REG_REF_BUF_SIZE) {
+ h5badArgument( env, "H5Rdereference: region ref input array != H5R_DSET_REG_REF_BUF_SIZE");
+ } /* end else if */
+ else {
+ refP = (jbyte*)ENVPTR->GetByteArrayElements(ENVPAR ref, &isCopy);
+ if (refP == NULL) {
+            h5JNIFatalError(env, "H5Rdereference: ref not pinned");
+ } /* end if */
+ else {
+ retVal = H5Rdereference2((hid_t)dataset, (hid_t)access_list, (H5R_type_t)ref_type, refP);
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+
+ if (retVal < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Rdereference */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Rget_region
+ * Signature: (JI[B)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Rget_1region(JNIEnv *env, jclass clss, jlong dataset, jint ref_type, jbyteArray ref)
+{
+ hid_t retVal = -1;
+ jboolean isCopy;
+ jbyte *refP;
+
+ if (ref_type != H5R_DATASET_REGION) {
+ h5badArgument( env, "H5Rget_region: bad ref_type ");
+ } /* end if */
+ else if (ref == NULL) {
+ h5nullArgument( env, "H5Rget_region: ref is NULL");
+ } /* end if */
+ else if ( ENVPTR->GetArrayLength(ENVPAR ref) != H5R_DSET_REG_REF_BUF_SIZE) {
+ h5badArgument( env, "H5Rget_region: region ref input array != H5R_DSET_REG_REF_BUF_SIZE");
+ } /* end if */
+ else {
+ refP = (jbyte*)ENVPTR->GetByteArrayElements(ENVPAR ref, &isCopy);
+ if (refP == NULL) {
+ h5JNIFatalError(env, "H5Rget_region: ref not pinned");
+ } /* end if */
+ else {
+ retVal = H5Rget_region((hid_t)dataset, (H5R_type_t)ref_type, refP);
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+
+ if (retVal < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Rget_1region */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5G_obj_t H5Rget_obj_type
+ * Signature: (JI[B)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Rget_1obj_1type(JNIEnv *env, jclass clss, jlong loc_id, jint ref_type, jbyteArray ref)
+{
+ int retVal =-1;
+ jboolean isCopy;
+ jbyte *refP;
+ H5O_type_t object_info;
+
+
+ if (ref == NULL) {
+ h5nullArgument( env, "H5Rget_object_type: ref is NULL");
+ } /* end if */
+ else {
+ refP = (jbyte*)ENVPTR->GetByteArrayElements(ENVPAR ref, &isCopy);
+ if (refP == NULL) {
+ h5JNIFatalError(env, "H5Rget_object_type: ref not pinned");
+ } /* end if */
+ else {
+ retVal = H5Rget_obj_type2((hid_t)loc_id, (H5R_type_t)ref_type, refP, &object_info);
+ if(retVal >= 0)
+ retVal = object_info;
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+
+ if (retVal < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Rget_1obj_1type */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: int H5Rget_obj_type2
+ * Signature: (JI[B[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Rget_1obj_1type2(JNIEnv *env, jclass clss, jlong loc_id, jint ref_type, jbyteArray ref, jintArray ref_obj)
+{
+
+ jint status;
+ jboolean isCopy;
+ jbyte *refP;
+ jint *ref_objP;
+ int retVal = -1;
+
+
+ if (ref == NULL) {
+ h5nullArgument( env, "H5Rget_object_type: ref is NULL");
+ } /* end if */
+ else if (ref_obj == NULL) {
+ h5nullArgument( env, "H5Rget_object_type: ref_obj is NULL");
+ } /* end else if */
+ else {
+ refP = (jbyte *)ENVPTR->GetByteArrayElements(ENVPAR ref, &isCopy);
+ if (refP == NULL) {
+ h5JNIFatalError(env, "H5Rget_object_type: ref not pinned");
+ } /* end if */
+ else {
+ ref_objP = (jint *)ENVPTR->GetIntArrayElements(ENVPAR ref_obj, &isCopy);
+ if (ref_objP == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR ref,refP,0);
+ h5JNIFatalError(env, "H5Rget_object_type: ref_obj not pinned");
+ } /* end if */
+ else {
+ status = H5Rget_obj_type2((hid_t)loc_id, (H5R_type_t)ref_type, refP, (H5O_type_t*)ref_objP);
+ retVal = ref_objP[0];
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+ if (status < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR ref_obj,ref_objP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR ref_obj, ref_objP, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Rget_1obj_1type2 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Rget_name
+ * Signature: (JI[B[Ljava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Rget_1name(JNIEnv *env, jclass clss, jlong loc_id, jint ref_type, jbyteArray ref, jobjectArray name, jlong size)
+{
+ jlong ret_val = -1;
+ jbyte *refP;
+ jboolean isCopy;
+ char *aName = NULL;
+ jstring str;
+ size_t bs;
+
+ bs = (size_t)size;
+    if (size <= 0) {
+ h5badArgument(env, "H5Rget_name: size <= 0");
+ } /* end if */
+ else if (ref == NULL) {
+ h5nullArgument(env, "H5Rget_name: ref is NULL");
+ } /* end else if */
+ else {
+ if ((ref_type == H5R_OBJECT) && ENVPTR->GetArrayLength(ENVPAR ref) != H5R_OBJ_REF_BUF_SIZE) {
+ h5badArgument(env, "H5Rget_name: obj ref input array != H5R_OBJ_REF_BUF_SIZE");
+ } /* end if */
+ else if ((ref_type == H5R_DATASET_REGION)
+ && ENVPTR->GetArrayLength(ENVPAR ref) != H5R_DSET_REG_REF_BUF_SIZE) {
+ h5badArgument(env, "H5Rget_name: region ref input array != H5R_DSET_REG_REF_BUF_SIZE");
+ } /* end else if */
+ else {
+ refP = (jbyte *)ENVPTR->GetByteArrayElements(ENVPAR ref, &isCopy);
+ if (refP == NULL) {
+ h5JNIFatalError(env, "H5Rget_name: ref not pinned");
+ } /* end if */
+ else {
+ aName = (char*)HDmalloc(sizeof(char)*bs);
+ if (aName == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+ h5outOfMemory(env, "H5Rget_name: malloc failed");
+ } /* end if */
+ else {
+ ret_val = (jlong)H5Rget_name((hid_t)loc_id, (H5R_type_t)ref_type, refP, aName, bs) ;
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR ref, refP, JNI_ABORT);
+ if (ret_val < 0) {
+ HDfree(aName);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR aName);
+ ENVPTR->SetObjectArrayElement(ENVPAR name, 0, str);
+
+ HDfree(aName);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return ret_val;
+} /* end Java_hdf_hdf5lib_H5_H5Rget_1name */
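+
+/*
+ * Illustrative sketch (C API) of the sizing pattern behind the size argument
+ * this wrapper receives from Java; a caller who does not know the name length
+ * can probe for it first (hypothetical loc_id and ref):
+ *
+ *     ssize_t len = H5Rget_name(loc_id, H5R_OBJECT, ref, NULL, 0);
+ *     char   *buf = (char *)HDmalloc((size_t)len + 1);
+ *     H5Rget_name(loc_id, H5R_OBJECT, ref, buf, (size_t)len + 1);
+ *     HDfree(buf);
+ */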
+
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
diff --git a/java/src/jni/h5rImp.h b/java/src/jni/h5rImp.h
new file mode 100644
index 0000000..2afb345
--- /dev/null
+++ b/java/src/jni/h5rImp.h
@@ -0,0 +1,78 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5R */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5R
+#define _Included_hdf_hdf5lib_H5_H5R
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Rcreate
+ * Signature: ([BJLjava/lang/String;IJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Rcreate
+ (JNIEnv *, jclass, jbyteArray, jlong, jstring, jint, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Rdereference
+ * Signature: (JJI[B)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Rdereference
+ (JNIEnv *, jclass, jlong, jlong, jint, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Rget_region
+ * Signature: (JI[B)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Rget_1region
+ (JNIEnv *, jclass, jlong, jint, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5G_obj_t H5Rget_obj_type
+ * Signature: (JI[B)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Rget_1obj_1type
+ (JNIEnv *, jclass, jlong, jint, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: int H5Rget_obj_type2
+ * Signature: (JI[B[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Rget_1obj_1type2
+ (JNIEnv *, jclass, jlong, jint, jbyteArray, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Rget_name
+ * Signature: (JI[B[Ljava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Rget_1name
+ (JNIEnv *, jclass, jlong, jint, jbyteArray, jobjectArray, jlong);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5R */
diff --git a/java/src/jni/h5sImp.c b/java/src/jni/h5sImp.c
new file mode 100644
index 0000000..2aad3d0
--- /dev/null
+++ b/java/src/jni/h5sImp.c
@@ -0,0 +1,1408 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ *     http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5jni.h"
+#include "h5sImp.h"
+
+extern JavaVM *jvm;
+extern jobject visit_callback;
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Screate
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Screate(JNIEnv *env, jclass clss, jint type)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Screate((H5S_class_t) type);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Screate */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Screate_simple
+ * Signature: (I[J[J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Screate_1simple(JNIEnv *env, jclass clss, jint rank,
+ jlongArray dims, jlongArray maxdims)
+{
+ hid_t retVal = -1;
+ jlong *dimsP, *maxdimsP;
+ jboolean isCopy;
+ hsize_t *sa = NULL;
+ hsize_t *msa = NULL;
+ int i;
+ int drank, mrank;
+ hsize_t *lp;
+ jlong *jlp;
+
+ if (rank < 0) {
+ h5badArgument(env, "H5Screate_simple: rank is invalid");
+ } /* end if */
+ else if (dims == NULL) {
+ h5nullArgument(env, "H5Screate_simple: dims is NULL");
+ } /* end else if */
+ else {
+ drank = (int)ENVPTR->GetArrayLength(ENVPAR dims);
+ if (drank != rank) {
+ h5badArgument(env, "H5Screate_simple: dims rank is invalid");
+ return -1;
+ } /* end if */
+ if (maxdims != NULL) {
+ mrank = (int)ENVPTR->GetArrayLength(ENVPAR maxdims);
+ if (mrank != rank) {
+ h5badArgument(env, "H5Screate_simple: maxdims rank is invalid");
+ return -1;
+ } /* end if */
+ } /* end if */
+ dimsP = ENVPTR->GetLongArrayElements(ENVPAR dims, &isCopy);
+ if (dimsP == NULL) {
+ h5JNIFatalError(env, "H5Screate_simple: dims not pinned");
+ return -1;
+ } /* end if */
+
+ sa = lp = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (sa == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Screate_simple: dims not converted to hsize_t");
+ return -1;
+ } /* end if */
+
+ jlp = (jlong *) dimsP;
+ for (i = 0; i < rank; i++) {
+ *lp = (hsize_t) *jlp;
+ lp++;
+ jlp++;
+ } /* end for */
+
+ if (maxdims == NULL) {
+ maxdimsP = NULL;
+ msa = (hsize_t *)maxdimsP;
+ } /* end if */
+ else {
+ maxdimsP = ENVPTR->GetLongArrayElements(ENVPAR maxdims, &isCopy);
+ if (maxdimsP == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ HDfree(sa);
+ h5JNIFatalError(env, "H5Screate_simple: maxdims not pinned");
+ return -1;
+ } /* end if */
+ msa = lp = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (msa == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims, maxdimsP, JNI_ABORT);
+ HDfree(sa);
+ h5JNIFatalError(env, "H5Screate_simple: dims not converted to hsize_t");
+ return -1;
+ } /* end if */
+ jlp = (jlong *)maxdimsP;
+ for (i = 0; i < mrank; i++) {
+ *lp = (hsize_t)*jlp;
+ lp++;
+ jlp++;
+ } /* end for */
+ } /* end else */
+
+ retVal = H5Screate_simple(rank, (const hsize_t *)sa, (const hsize_t *)msa);
+
+ if (maxdimsP != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims, maxdimsP, JNI_ABORT);
+ if (msa)
+ HDfree(msa);
+ } /* end if */
+
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ if (sa)
+ HDfree(sa);
+
+ if (retVal < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Screate_1simple */
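+
+/*
+ * The element-by-element jlong -> hsize_t copy above is needed because hsize_t
+ * is not guaranteed to share jlong's width or signedness, so the pinned array
+ * cannot simply be cast.  The net effect is equivalent to this C sketch
+ * (hypothetical extents):
+ *
+ *     hsize_t dims[2]    = {20, 10};
+ *     hsize_t maxdims[2] = {H5S_UNLIMITED, 10};
+ *     hid_t   sid        = H5Screate_simple(2, dims, maxdims);
+ *     H5Sclose(sid);
+ */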
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Scopy
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Scopy(JNIEnv *env, jclass clss, jlong space_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Scopy(space_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Scopy */
+
+#ifdef notdef
+// 10/28/99 -- added code to copy the array -- this is not used,
+// but serves as a reminder in case we try to implement this in
+// the future....
+/*
+ * Note: the argument coord is actually long coord[][], which has been
+ * flattened by the caller.
+ */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sselect_elements
+ * Signature: (JII[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sselect_1elements(JNIEnv *env, jclass clss, jlong space_id, jint op, jint num_elemn, jlongArray coord)
+{
+ herr_t status = -1;
+ jint i;
+ jlong *P;
+ jboolean isCopy;
+ hssize_t *sa;
+ int rank;
+
+ if (coord == NULL) {
+ h5nullArgument( env, "H5Sselect_elements: coord is NULL");
+ return -1;
+ } /* end if */
+
+    P = ENVPTR->GetLongArrayElements(ENVPAR coord, &isCopy);
+ if (P == NULL) {
+ h5JNIFatalError(env, "H5Sselect_elements: coord not pinned");
+ return -1;
+ } /* end if */
+    sa = (hssize_t *)HDmalloc((size_t)num_elemn * 2 * sizeof(hssize_t));
+ if (sa == NULL) {
+        ENVPTR->ReleaseLongArrayElements(ENVPAR coord, P, JNI_ABORT);
+ h5JNIFatalError(env, "H5Sselect_elements: coord array not converted to hssize_t");
+ return -1;
+ } /* end if */
+    for (i = 0; i < (num_elemn * 2); i++) {
+ sa[i] = P[i];
+ } /* end for */
+
+ status = H5Sselect_elements (space_id, (H5S_seloper_t)op, num_elemn, (const hssize_t **)&sa);
+    ENVPTR->ReleaseLongArrayElements(ENVPAR coord, P, JNI_ABORT);
+ HDfree(sa);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Sselect_1elements */
+#endif
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sselect_elements
+ * Signature: (JII[B)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sselect_1elements(JNIEnv *env, jclass clss, jlong space_id, jint op,
+ jint num_elemn, jbyteArray coord)
+{
+ int ii;
+ hsize_t *lp = NULL;
+ hsize_t *llp;
+ jlong *jlp;
+ herr_t status = -1;
+ jbyte *P;
+ jboolean isCopy;
+ jsize size;
+ int nlongs;
+
+ if (coord == NULL) {
+ h5nullArgument(env, "H5Sselect_elements: coord is NULL");
+ } /* end if */
+ else {
+ P = ENVPTR->GetByteArrayElements(ENVPAR coord, &isCopy);
+ if (P == NULL) {
+ h5JNIFatalError(env, "H5Sselect_elements: coord not pinned");
+ } /* end if */
+ else {
+ size = (int)ENVPTR->GetArrayLength(ENVPAR coord);
+ nlongs = (int)((size_t)size / sizeof(jlong));
+ lp = (hsize_t *)HDmalloc((size_t)nlongs * sizeof(hsize_t));
+ jlp = (jlong *) P;
+ llp = lp;
+ for (ii = 0; ii < nlongs; ii++) {
+ *lp = (hsize_t) *jlp;
+ lp++;
+ jlp++;
+ } /* end for */
+
+ status = H5Sselect_elements(space_id, (H5S_seloper_t)op, (size_t)num_elemn, (const hsize_t *)llp);
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR coord, P, JNI_ABORT);
+
+ if (llp)
+ HDfree(llp);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Sselect_1elements */
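+
+/*
+ * Illustrative sketch (C API): the coord byte array arrives from Java as a
+ * flattened long[][] of num_elemn rows by rank columns, which is exactly the
+ * row-major layout H5Sselect_elements expects.  For a rank-2 dataspace:
+ *
+ *     hsize_t coord[3][2] = { {0, 1}, {2, 3}, {4, 5} };
+ *     H5Sselect_elements(space_id, H5S_SELECT_SET, 3, (const hsize_t *)coord);
+ */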
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sselect_all
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sselect_1all(JNIEnv *env, jclass clss, jlong space_id)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Sselect_all(space_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint) retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Sselect_1all */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sselect_none
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sselect_1none(JNIEnv *env, jclass clss, jlong space_id)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Sselect_none(space_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint) retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Sselect_1none */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sselect_valid
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Sselect_1valid(JNIEnv *env, jclass clss, jlong space_id)
+{
+ htri_t bval = JNI_FALSE;
+
+ bval = H5Sselect_valid(space_id);
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Sselect_1valid */
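+
+/*
+ * Note on the htri_t pattern used by this and the other predicate wrappers in
+ * this file: the C API returns positive for true, zero for false, and negative
+ * on failure, so only the negative case raises a Java exception; zero and
+ * positive collapse to JNI_FALSE and JNI_TRUE respectively.
+ */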
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_simple_extent_npoints
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1npoints(JNIEnv *env, jclass clss, jlong space_id)
+{
+ hssize_t retVal = H5Sget_simple_extent_npoints(space_id);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong) retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1npoints */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_select_npoints
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Sget_1select_1npoints(JNIEnv *env, jclass clss, jlong space_id)
+{
+ hssize_t retVal = H5Sget_select_npoints(space_id);
+
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong) retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Sget_1select_1npoints */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_select_type
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sget_1select_1type(JNIEnv *env, jclass clss, jlong space_id)
+{
+ int retVal = -1;
+
+ retVal = H5Sget_select_type(space_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint) retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Sget_1select_1type */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_simple_extent_ndims
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1ndims(JNIEnv *env, jclass clss, jlong space_id)
+{
+ int retVal = -1;
+
+ retVal = H5Sget_simple_extent_ndims(space_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint) retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1ndims */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_simple_extent_dims
+ * Signature: (J[J[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1dims(JNIEnv *env, jclass clss, jlong space_id,
+ jlongArray dims, jlongArray maxdims)
+{
+ int status = -1;
+ jlong *dimsP, *maxdimsP;
+ jboolean isCopy;
+ hsize_t *sa;
+ hsize_t *msa;
+ int i;
+ int rank = -1;
+ int mrank;
+
+ if (dims == NULL) {
+ dimsP = NULL;
+ sa = (hsize_t *) dimsP;
+ } /* end if */
+ else {
+ dimsP = ENVPTR->GetLongArrayElements(ENVPAR dims, &isCopy);
+ if (dimsP == NULL) {
+ h5JNIFatalError(env, "H5Pget_simple_extent_dims: dims not pinned");
+ return -1;
+ } /* end if */
+ rank = (int)ENVPTR->GetArrayLength(ENVPAR dims);
+ sa = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (sa == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Sget_simple_extent_dims: dims not converted to hsize_t");
+ return -1;
+ } /* end if */
+ } /* end else */
+ if (maxdims == NULL) {
+ maxdimsP = NULL;
+ msa = (hsize_t *) maxdimsP;
+ } /* end if */
+ else {
+ maxdimsP = ENVPTR->GetLongArrayElements(ENVPAR maxdims, &isCopy);
+ if (maxdimsP == NULL) {
+ if (dimsP != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ HDfree(sa);
+ } /* end if */
+ h5JNIFatalError(env, "H5Pget_simple_extent_dims: maxdims not pinned");
+ return -1;
+ } /* end if */
+ mrank = (int) ENVPTR->GetArrayLength(ENVPAR maxdims);
+ if (rank < 0)
+ rank = mrank;
+ else if (mrank != rank) {
+ if (dimsP != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ HDfree(sa);
+ } /* end if */
+ ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims, maxdimsP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Sget_simple_extent_dims: maxdims rank not same as dims");
+ return -1;
+ } /* end else if */
+ msa = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (msa == NULL) {
+ if (dimsP != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ HDfree(sa);
+ } /* end if */
+ ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims, maxdimsP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Sget_simple_extent_dims: maxdims not converted to hsize_t");
+ return -1;
+ } /* end if */
+ } /* end else */
+
+ status = H5Sget_simple_extent_dims(space_id, (hsize_t *)sa, (hsize_t *)msa);
+
+ if (status < 0) {
+ if (dimsP != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ HDfree(sa);
+ } /* end if */
+ if (maxdimsP != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims, maxdimsP, JNI_ABORT);
+ HDfree(msa);
+ } /* end if */
+ h5libraryError(env);
+ return -1;
+ } /* end if */
+
+ if (dimsP != NULL) {
+ for (i = 0; i < rank; i++) {
+ dimsP[i] = (jlong)sa[i];
+ } /* end for */
+ HDfree(sa);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, 0);
+ } /* end if */
+ if (maxdimsP != NULL) {
+ for (i = 0; i < rank; i++) {
+ maxdimsP[i] = (jlong)msa[i];
+ } /* end for */
+ HDfree(msa);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims, maxdimsP, 0);
+ } /* end if */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1dims */
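+
+/*
+ * Illustrative sketch (C API) of the two-step idiom the Java caller is
+ * expected to follow, sizing both output arrays to the dataspace rank:
+ *
+ *     int      ndims = H5Sget_simple_extent_ndims(space_id);
+ *     hsize_t *dims  = (hsize_t *)HDmalloc((size_t)ndims * sizeof(hsize_t));
+ *     H5Sget_simple_extent_dims(space_id, dims, NULL);
+ *     HDfree(dims);
+ */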
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_simple_extent_type
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1type(JNIEnv *env, jclass clss, jlong space_id)
+{
+ H5S_class_t retVal = H5S_NO_CLASS;
+
+ if (space_id < 0)
+ h5libraryError(env);
+
+ retVal = H5Sget_simple_extent_type(space_id);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1type */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sset_extent_simple
+ * Signature: (JI[J[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sset_1extent_1simple(JNIEnv *env, jclass clss, jlong space_id,
+ jint rank, jlongArray dims, jlongArray maxdims)
+{
+ herr_t status = -1;
+ jlong *dimsP, *maxdimsP;
+ jboolean isCopy;
+ hsize_t *sa;
+ hsize_t *msa;
+ int i;
+ int drank, mrank;
+ hsize_t *lp;
+ jlong *jlp;
+
+ if (dims == NULL) {
+ h5nullArgument(env, "H5Sset_simple_extent: dims is NULL");
+ return -1;
+ } /* end if */
+ drank = (int) ENVPTR->GetArrayLength(ENVPAR dims);
+ if (drank != rank) {
+ h5badArgument(env, "H5Screate_simple: dims rank is invalid");
+ return -1;
+ } /* end if */
+ if (maxdims != NULL) {
+ mrank = (int) ENVPTR->GetArrayLength(ENVPAR maxdims);
+ if (mrank != rank) {
+ h5badArgument(env, "H5Screate_simple: maxdims rank is invalid");
+ return -1;
+ } /* end if */
+ } /* end if */
+ dimsP = ENVPTR->GetLongArrayElements(ENVPAR dims, &isCopy);
+ if (dimsP == NULL) {
+ h5JNIFatalError(env, "H5Pset_simple_extent: dims not pinned");
+ return -1;
+ } /* end if */
+    sa = lp = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (sa == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Sset_simple_extent: dims not converted to hsize_t");
+ return -1;
+ } /* end if */
+ jlp = (jlong *) dimsP;
+ for (i = 0; i < rank; i++) {
+ *lp = (hsize_t) *jlp;
+ lp++;
+ jlp++;
+ } /* end for */
+ if (maxdims == NULL) {
+ maxdimsP = NULL;
+ msa = (hsize_t *) maxdimsP;
+ } /* end if */
+ else {
+ maxdimsP = ENVPTR->GetLongArrayElements(ENVPAR maxdims, &isCopy);
+ if (maxdimsP == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Pset_simple_extent: maxdims not pinned");
+ return -1;
+ } /* end if */
+ msa = lp = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (msa == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims, maxdimsP, JNI_ABORT);
+ HDfree(sa);
+ h5JNIFatalError(env, "H5Sset_simple_extent: maxdims not converted to hsize_t");
+ return -1;
+ } /* end if */
+ jlp = (jlong *) maxdimsP;
+ for (i = 0; i < rank; i++) {
+ *lp = (hsize_t) *jlp;
+ lp++;
+ jlp++;
+ } /* end for */
+ } /* end else */
+
+ status = H5Sset_extent_simple(space_id, rank, (hsize_t *) sa, (hsize_t *) msa);
+
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ HDfree(sa);
+ if (maxdimsP != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR maxdims, maxdimsP, JNI_ABORT);
+ HDfree(msa);
+ } /* end if */
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Sset_1extent_1simple */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sis_simple
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Sis_1simple(JNIEnv *env, jclass clss, jlong space_id)
+{
+ htri_t bval = JNI_FALSE;
+
+ bval = H5Sis_simple(space_id);
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Sis_1simple */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Soffset_simple
+ * Signature: (J[B)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Soffset_1simple(JNIEnv *env, jclass clss, jlong space_id, jbyteArray offset)
+{
+ herr_t status;
+ jbyte *P = NULL;
+ jboolean isCopy;
+ hssize_t *sa;
+ size_t rank;
+ size_t i;
+ hssize_t *lp;
+ jlong *jlp;
+
+ if (offset != NULL) {
+ P = ENVPTR->GetByteArrayElements(ENVPAR offset, &isCopy);
+ if (P == NULL) {
+ h5JNIFatalError(env, "H5Soffset_simple: offset not pinned");
+ return -1;
+ } /* end if */
+ i = (size_t)ENVPTR->GetArrayLength(ENVPAR offset);
+ rank = i / sizeof(jlong);
+ sa = lp = (hssize_t *)HDmalloc((size_t)rank * sizeof(hssize_t));
+ if (sa == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR offset, P, JNI_ABORT);
+ h5JNIFatalError(env, "H5Soffset_simple: offset not converted to hssize_t");
+ return -1;
+ } /* end if */
+ jlp = (jlong *) P;
+ for (i = 0; i < rank; i++) {
+ *lp = (hssize_t) *jlp;
+ lp++;
+ jlp++;
+ } /* end for */
+ } /* end if */
+ else {
+ P = NULL;
+ sa = (hssize_t *) P;
+ } /* end else */
+
+ status = H5Soffset_simple(space_id, sa);
+ if (P != NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR offset, P, JNI_ABORT);
+ HDfree(sa);
+ } /* end if */
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Soffset_1simple */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sextent_copy
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sextent_1copy(JNIEnv *env, jclass clss, jlong space_id, jlong src_id)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Sextent_copy(space_id, src_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Sextent_1copy */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sextent_equal
+ * Signature: (JJ)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Sextent_1equal(JNIEnv *env, jclass clss, jlong space_id, jlong src_id)
+{
+ htri_t bval = JNI_FALSE;
+
+ bval = H5Sextent_equal(space_id, src_id);
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Sextent_1equal */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sset_extent_none
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sset_1extent_1none(JNIEnv *env, jclass clss, jlong space_id)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Sset_extent_none(space_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Sset_1extent_1none */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sselect_hyperslab
+ * Signature: (JI[J[J[J[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sselect_1hyperslab(JNIEnv *env, jclass clss, jlong space_id, jint op,
+ jlongArray start, jlongArray stride, jlongArray count, jlongArray block)
+{
+ herr_t status = -1;
+ jlong *startP, *strideP, *countP, *blockP;
+ jboolean isCopy;
+ hsize_t *strt;
+ hsize_t *strd;
+ hsize_t *cnt;
+ hsize_t *blk;
+ int rank;
+ int i;
+ hsize_t *lp;
+ jlong *jlp;
+
+ if (start == NULL) {
+ h5nullArgument(env, "H5Sselect_hyperslab: start is NULL");
+ } /* end if */
+ else if (count == NULL) {
+ h5nullArgument(env, "H5Sselect_hyperslab: count is NULL");
+ } /* end if */
+ else {
+ rank = (int) ENVPTR->GetArrayLength(ENVPAR start);
+ if (rank != ENVPTR->GetArrayLength(ENVPAR count)) {
+ h5badArgument(env, "H5Sselect_hyperslab: count and start have different rank!");
+ return -1;
+ } /* end if */
+
+ startP = ENVPTR->GetLongArrayElements(ENVPAR start, &isCopy);
+ if (startP == NULL) {
+ h5JNIFatalError(env, "H5Sselect_hyperslab: start not pinned");
+ return -1;
+ } /* end if */
+ strt = lp = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (strt == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Sselect_hyperslab: start not converted to hsize_t");
+ return -1;
+ } /* end if */
+
+ jlp = (jlong *) startP;
+ for (i = 0; i < rank; i++) {
+ *lp = (hsize_t) *jlp;
+ lp++;
+ jlp++;
+        } /* end for */
+
+ countP = ENVPTR->GetLongArrayElements(ENVPAR count, &isCopy);
+ if (countP == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ HDfree(strt);
+ h5JNIFatalError(env, "H5Sselect_hyperslab: count not pinned");
+ return -1;
+ } /* end if */
+ cnt = lp = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (cnt == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP, JNI_ABORT);
+ HDfree(strt);
+ h5JNIFatalError(env, "H5Sselect_hyperslab: count not converted to hsize_t");
+ return -1;
+ } /* end if */
+
+ jlp = (jlong *) countP;
+ for (i = 0; i < rank; i++) {
+ *lp = (hsize_t) *jlp;
+ lp++;
+ jlp++;
+        } /* end for */
+ if (stride == NULL) {
+ strideP = NULL;
+ strd = (hsize_t *) strideP;
+ } /* end if */
+ else {
+ strideP = ENVPTR->GetLongArrayElements(ENVPAR stride, &isCopy);
+ if (strideP == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ HDfree(cnt);
+ HDfree(strt);
+ h5badArgument(env, "H5Sselect_hyperslab: stride not pinned");
+ return -1;
+ } /* end if */
+ strd = lp = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (strd == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR stride, strideP, JNI_ABORT);
+ HDfree(cnt);
+ HDfree(strt);
+ h5JNIFatalError(env, "H5Sselect_hyperslab: stride not converted to hsize_t");
+ return -1;
+ } /* end if */
+ jlp = (jlong *) strideP;
+ for (i = 0; i < rank; i++) {
+ *lp = (hsize_t) *jlp;
+ lp++;
+ jlp++;
+            } /* end for */
+ } /* end if */
+ if (block == NULL) {
+ blockP = NULL;
+ blk = (hsize_t *) blockP;
+ } /* end if */
+ else {
+ blockP = ENVPTR->GetLongArrayElements(ENVPAR block, &isCopy);
+ if (blockP == NULL) {
+                if (strideP != NULL)
+                    ENVPTR->ReleaseLongArrayElements(ENVPAR stride, strideP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ HDfree(cnt);
+ HDfree(strt);
+ if (strd != NULL)
+                    HDfree(strd);
+
+ h5JNIFatalError(env, "H5Sselect_hyperslab: block not pinned");
+ return -1;
+ } /* end if */
+ blk = lp = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (blk == NULL) {
+                if (strideP != NULL)
+                    ENVPTR->ReleaseLongArrayElements(ENVPAR stride, strideP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR block, blockP, JNI_ABORT);
+ HDfree(cnt);
+ HDfree(strt);
+ if (strd != NULL)
+                    HDfree(strd);
+
+                h5JNIFatalError(env, "H5Sselect_hyperslab: block not converted to hsize_t");
+ return -1;
+ } /* end if */
+ jlp = (jlong *) blockP;
+ for (i = 0; i < rank; i++) {
+ *lp = (hsize_t) *jlp;
+ lp++;
+ jlp++;
+ } /* end for */
+ } /* end else */
+
+ status = H5Sselect_hyperslab(space_id, (H5S_seloper_t) op, (const hsize_t *) strt, (const hsize_t *) strd,
+ (const hsize_t *) cnt, (const hsize_t *) blk);
+
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP, JNI_ABORT);
+ HDfree(strt);
+ HDfree(cnt);
+ if (strideP != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR stride, strideP, JNI_ABORT);
+ HDfree(strd);
+ } /* end if */
+ if (blockP != NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR block, blockP, JNI_ABORT);
+ HDfree(blk);
+ } /* end if */
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Sselect_1hyperslab */
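+
+/*
+ * Illustrative usage sketch (not part of this patch), assuming the Java-side
+ * H5.H5Sselect_hyperslab wrapper declared in this file and the binding's
+ * HDF5Constants class: stride and block may be passed as null, in which case
+ * NULL reaches H5Sselect_hyperslab() and the library uses its default of 1.
+ *
+ *     long[] start = {0, 1};
+ *     long[] count = {4, 4};
+ *     H5.H5Sselect_hyperslab(space_id, HDF5Constants.H5S_SELECT_SET,
+ *                            start, null, count, null);
+ */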
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5__1H5Sclose(JNIEnv *env, jclass clss, jlong space_id)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Sclose(space_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Sclose */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_select_hyper_nblocks
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Sget_1select_1hyper_1nblocks(JNIEnv *env, jclass clss, jlong spaceid)
+{
+ hssize_t retVal = -1;
+
+ retVal = H5Sget_select_hyper_nblocks((hid_t) spaceid);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Sget_1select_1hyper_1nblocks */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_select_elem_npoints
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Sget_1select_1elem_1npoints(JNIEnv *env, jclass clss, jlong spaceid)
+{
+ hssize_t retVal = -1;
+
+ retVal = H5Sget_select_elem_npoints((hid_t) spaceid);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Sget_1select_1elem_1npoints */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_select_hyper_blocklist
+ * Signature: (JJJ[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sget_1select_1hyper_1blocklist(JNIEnv *env, jclass clss,
+ jlong spaceid, jlong startblock, jlong numblocks, jlongArray buf)
+{
+ herr_t status = -1;
+ jlong *bufP;
+ jboolean isCopy;
+ hsize_t *ba;
+ int i;
+ int rank;
+ hsize_t st;
+ hsize_t nb;
+
+ st = (hsize_t) startblock;
+ nb = (hsize_t) numblocks;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Sget_select_hyper_blocklist: buf is NULL");
+ } /* end if */
+ else {
+ rank = H5Sget_simple_extent_ndims(spaceid);
+ if (rank <= 0)
+ rank = 1;
+ if (ENVPTR->GetArrayLength(ENVPAR buf) < (numblocks * rank)) {
+ h5badArgument(env, "H5Sget_select_hyper_blocklist: buf input array too small");
+ } /* end if */
+ else {
+ bufP = ENVPTR->GetLongArrayElements(ENVPAR buf, &isCopy);
+ if (bufP == NULL) {
+ h5JNIFatalError(env, "H5Sget_select_hyper_blocklist: buf not pinned");
+ } /* end if */
+ else {
+                ba = (hsize_t *)HDmalloc((size_t)nb * 2 * (size_t)rank * sizeof(hsize_t));
+                if (ba == NULL) {
+                    ENVPTR->ReleaseLongArrayElements(ENVPAR buf, bufP, JNI_ABORT);
+                    h5JNIFatalError(env, "H5Sget_select_hyper_blocklist: buffer not converted to hsize_t");
+ } /* end if */
+ else {
+ status = H5Sget_select_hyper_blocklist((hid_t)spaceid, st, nb, (hsize_t *)ba);
+
+ if (status < 0) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR buf, bufP, JNI_ABORT);
+                        HDfree(ba);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ for (i = 0; i < (numblocks * 2 * rank); i++) {
+ bufP[i] = (jlong)ba[i];
+ } /* end for */
+                        HDfree(ba);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR buf, bufP, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Sget_1select_1hyper_1blocklist */
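+
+/*
+ * Illustrative usage sketch (not part of this patch): each block contributes
+ * 2 * rank coordinates (the "start" corner followed by the "opposite" corner),
+ * so the Java caller sizes buf as numblocks * 2 * rank.  The wrappers named
+ * below are declared in this file; the cast to int assumes the block count
+ * fits in a Java array length.
+ *
+ *     long nblocks = H5.H5Sget_select_hyper_nblocks(space_id);
+ *     int  rank    = H5.H5Sget_simple_extent_ndims(space_id);
+ *     long[] buf   = new long[(int)(nblocks * 2 * rank)];
+ *     H5.H5Sget_select_hyper_blocklist(space_id, 0, nblocks, buf);
+ */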
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_select_elem_pointlist
+ * Signature: (JJJ[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sget_1select_1elem_1pointlist(JNIEnv *env, jclass clss, jlong spaceid,
+ jlong startpoint, jlong numpoints, jlongArray buf)
+{
+ herr_t status = -1;
+ jlong *bufP;
+ jboolean isCopy;
+ hsize_t *ba;
+ int i;
+ int rank;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Sget_select_elem_pointlist: buf is NULL");
+ } /* end if */
+ else {
+ rank = H5Sget_simple_extent_ndims(spaceid);
+ if (rank <= 0)
+ rank = 1;
+ if (ENVPTR->GetArrayLength(ENVPAR buf) < (numpoints * rank)) {
+ h5badArgument(env, "H5Sget_select_elem_pointlist: buf input array too small");
+ } /* end if */
+ else {
+ bufP = ENVPTR->GetLongArrayElements(ENVPAR buf, &isCopy);
+ if (bufP == NULL) {
+ h5JNIFatalError(env, "H5Sget_select_elem_pointlist: buf not pinned");
+ } /* end if */
+ else {
+ ba = (hsize_t *)HDmalloc(((size_t)numpoints * (size_t)rank) * sizeof(hsize_t));
+ if (ba == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR buf, bufP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Sget_select_elem_pointlist: buf not converted to hsize_t");
+ } /* end if */
+ else {
+ status = H5Sget_select_elem_pointlist((hid_t) spaceid, (hsize_t)startpoint, (hsize_t)numpoints, (hsize_t *)ba);
+
+ if (status < 0) {
+ HDfree(ba);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR buf, bufP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ for (i = 0; i < (numpoints * rank); i++) {
+ bufP[i] = (jlong)ba[i];
+ } /* end for */
+ HDfree(ba);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR buf, bufP, 0);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Sget_1select_1elem_1pointlist */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_select_bounds
+ * Signature: (J[J[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Sget_1select_1bounds(JNIEnv *env, jclass clss, jlong spaceid,
+ jlongArray start, jlongArray end)
+{
+ herr_t status = -1;
+ jlong *startP, *endP;
+ jboolean isCopy;
+ hsize_t *strt;
+ hsize_t *en;
+ int rank;
+ int i;
+
+ if (start == NULL) {
+ h5nullArgument(env, "H5Sget_select_bounds: start is NULL");
+ } /* end if */
+ else if (end == NULL) {
+ h5nullArgument(env, "H5Sget_select_bounds: end is NULL");
+ } /* end else if */
+ else {
+ startP = ENVPTR->GetLongArrayElements(ENVPAR start, &isCopy);
+ if (startP == NULL) {
+ h5JNIFatalError(env, "H5Sget_select_bounds: start not pinned");
+ return -1;
+ } /* end if */
+ rank = (int) ENVPTR->GetArrayLength(ENVPAR start);
+        strt = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (strt == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Sget_select_bounds: start not converted to hsize_t");
+ return -1;
+ } /* end if */
+
+ endP = ENVPTR->GetLongArrayElements(ENVPAR end, &isCopy);
+ if (endP == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+            HDfree(strt);
+ h5JNIFatalError(env, "H5Sget_select_bounds: end not pinned");
+ return -1;
+ } /* end if */
+ en = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (en == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR end, endP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ HDfree(strt);
+            h5JNIFatalError(env, "H5Sget_select_bounds: end not converted to hsize_t");
+ return -1;
+ } /* end if */
+
+ status = H5Sget_select_bounds((hid_t) spaceid, (hsize_t *) strt, (hsize_t *) en);
+
+ if (status < 0) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR end, endP, JNI_ABORT);
+ HDfree(strt);
+ HDfree(en);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ for (i = 0; i < rank; i++) {
+ startP[i] = (jlong)strt[i];
+ endP[i] = (jlong)en[i];
+ } /* end for */
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, 0);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR end, endP, 0);
+ HDfree(strt);
+ HDfree(en);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Sget_1select_1bounds */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sencode
+ * Signature: (J)[B
+ */
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdf5lib_H5_H5Sencode(JNIEnv *env, jclass cls, jlong obj_id)
+{
+ herr_t status = -1;
+ unsigned char *bufPtr;
+ size_t buf_size = 0;
+ jbyteArray returnedArray = NULL;
+
+ if (obj_id < 0) {
+ h5badArgument(env, "H5Sencode: invalid argument");
+ } /* end if */
+ else {
+ status = H5Sencode(obj_id, NULL, &buf_size);
+
+        if (status < 0) {
+            h5libraryError(env);
+        } /* end if */
+        else if (buf_size == 0) {
+            h5badArgument(env, "H5Sencode: buf_size == 0");
+        } /* end else if */
+ else {
+ bufPtr = (unsigned char*)HDcalloc((size_t) 1, buf_size);
+ if (bufPtr == NULL) {
+ h5outOfMemory(env, "H5Sencode: calloc failed");
+ } /* end if */
+ else {
+ status = H5Sencode((hid_t) obj_id, bufPtr, &buf_size);
+
+ if (status < 0) {
+ HDfree(bufPtr);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ returnedArray = ENVPTR->NewByteArray(ENVPAR (jsize)buf_size);
+ ENVPTR->SetByteArrayRegion(ENVPAR returnedArray, 0, (jsize)buf_size, (jbyte*) bufPtr);
+ HDfree(bufPtr);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return returnedArray;
+} /* end Java_hdf_hdf5lib_H5_H5Sencode */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sdecode
+ * Signature: ([B)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Sdecode(JNIEnv *env, jclass cls, jbyteArray buf)
+{
+ hid_t sid = -1;
+ jbyte *bufP;
+ jboolean isCopy;
+
+ if (buf == NULL) {
+ h5nullArgument(env, "H5Sdecode: buf is NULL");
+ } /* end if */
+ else {
+ bufP = ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy);
+ if (bufP == NULL) {
+ h5JNIFatalError( env, "H5Sdecode: buf not pinned");
+ } /* end if */
+ else {
+ sid = H5Sdecode(bufP);
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR buf, bufP, JNI_ABORT);
+
+ if (sid < 0)
+ h5libraryError(env);
+        } /* end else */
+    } /* end else */
+
+ return (jlong)sid;
+} /* end Java_hdf_hdf5lib_H5_H5Sdecode */
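+
+/*
+ * Illustrative round-trip sketch (not part of this patch), using the encode and
+ * decode wrappers declared in this file (a public H5.H5Sclose wrapper over the
+ * _H5Sclose native is assumed):
+ *
+ *     byte[] enc  = H5.H5Sencode(space_id);   // serialized dataspace
+ *     long   copy = H5.H5Sdecode(enc);        // new id; caller must close it
+ *     H5.H5Sclose(copy);
+ */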
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sis_regular_hyperslab
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Sis_1regular_1hyperslab(JNIEnv *env, jclass cls, jlong obj_id)
+{
+ htri_t bval = JNI_FALSE;
+
+ bval = H5Sis_regular_hyperslab((hid_t)obj_id);
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Sis_1regular_1hyperslab */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_regular_hyperslab
+ * Signature: (J[J[J[J[J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Sget_1regular_1hyperslab(JNIEnv *env, jclass clss, jlong space_id,
+ jlongArray start, jlongArray stride, jlongArray count, jlongArray block)
+{
+ herr_t status;
+ jlong *startP, *strideP, *countP, *blockP;
+ jboolean isCopy;
+ hsize_t *strt;
+ hsize_t *strd;
+ hsize_t *cnt;
+ hsize_t *blk;
+ int rank;
+ int i;
+
+ if (start == NULL) {
+ h5nullArgument(env, "H5Sget_regular_hyperslab: start is NULL");
+ } /* end if */
+ else if (count == NULL) {
+ h5nullArgument(env, "H5Sget_regular_hyperslab: count is NULL");
+ } /* end else if */
+ else {
+ rank = (int) ENVPTR->GetArrayLength(ENVPAR start);
+ if (rank != ENVPTR->GetArrayLength(ENVPAR count)) {
+ h5badArgument(env, "H5Sget_regular_hyperslab: count and start have different rank!");
+ return;
+ } /* end if */
+
+ startP = ENVPTR->GetLongArrayElements(ENVPAR start, &isCopy);
+ if (startP == NULL) {
+ h5JNIFatalError(env, "H5Sget_regular_hyperslab: start not pinned");
+ return;
+ } /* end if */
+ strt = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (strt == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Sget_regular_hyperslab: start not converted to hsize_t");
+ return;
+ } /* end if */
+
+ countP = ENVPTR->GetLongArrayElements(ENVPAR count, &isCopy);
+ if (countP == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ HDfree(strt);
+ h5JNIFatalError(env, "H5Sget_regular_hyperslab: count not pinned");
+ return;
+ } /* end if */
+ cnt = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (cnt == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP, JNI_ABORT);
+ HDfree(strt);
+ h5JNIFatalError(env, "H5Sget_regular_hyperslab: count not converted to hsize_t");
+ return;
+ } /* end if */
+
+ strideP = ENVPTR->GetLongArrayElements(ENVPAR stride, &isCopy);
+ if (strideP == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ HDfree(cnt);
+ HDfree(strt);
+ h5badArgument(env, "H5Sget_regular_hyperslab: stride not pinned");
+ return;
+ } /* end if */
+ strd = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (strd == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR stride, strideP, JNI_ABORT);
+ HDfree(cnt);
+ HDfree(strt);
+ h5JNIFatalError(env, "H5Sget_regular_hyperslab: stride not converted to hsize_t");
+ return;
+ } /* end if */
+
+ blockP = ENVPTR->GetLongArrayElements(ENVPAR block, &isCopy);
+ if (blockP == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR stride, strideP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ HDfree(cnt);
+ HDfree(strt);
+ HDfree(strd);
+ h5JNIFatalError(env, "H5Sget_regular_hyperslab: block not pinned");
+ return;
+ } /* end if */
+ blk = (hsize_t *)HDmalloc((size_t)rank * sizeof(hsize_t));
+ if (blk == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR stride, strideP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR block, blockP, JNI_ABORT);
+ HDfree(cnt);
+ HDfree(strt);
+ HDfree(strd);
+ h5JNIFatalError(env, "H5Sget_regular_hyperslab: block not converted to hsize_t");
+ return;
+ } /* end if */
+
+ status = H5Sget_regular_hyperslab(space_id, (hsize_t *) strt, (hsize_t *) strd, (hsize_t *) cnt, (hsize_t *) blk);
+
+ if (status < 0) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR stride, strideP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR block, blockP, JNI_ABORT);
+ HDfree(strt);
+ HDfree(cnt);
+ HDfree(strd);
+ HDfree(blk);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ for (i = 0; i < (rank); i++) {
+ startP[i] = (jlong)strt[i];
+ countP[i] = (jlong)cnt[i];
+ strideP[i] = (jlong)strd[i];
+ blockP[i] = (jlong)blk[i];
+ } /* end for */
+ HDfree(strt);
+ HDfree(cnt);
+ HDfree(strd);
+ HDfree(blk);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR start, startP, 0);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR count, countP, 0);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR stride, strideP, 0);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR block, blockP, 0);
+ } /* end else */
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Sget_1regular_1hyperslab */
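+
+/*
+ * Illustrative usage sketch (not part of this patch): unlike H5Sselect_hyperslab
+ * above, this wrapper pins all four arrays unconditionally, so the Java caller
+ * must pass non-null arrays of length rank for start, stride, count and block.
+ *
+ *     if (H5.H5Sis_regular_hyperslab(space_id)) {
+ *         int rank = H5.H5Sget_simple_extent_ndims(space_id);
+ *         long[] start  = new long[rank], stride = new long[rank];
+ *         long[] count  = new long[rank], block  = new long[rank];
+ *         H5.H5Sget_regular_hyperslab(space_id, start, stride, count, block);
+ *     }
+ */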
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
diff --git a/java/src/jni/h5sImp.h b/java/src/jni/h5sImp.h
new file mode 100644
index 0000000..007b9a7
--- /dev/null
+++ b/java/src/jni/h5sImp.h
@@ -0,0 +1,288 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5S */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5S
+#define _Included_hdf_hdf5lib_H5_H5S
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Screate
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Screate(
+ JNIEnv *, jclass, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Screate_simple
+ * Signature: (I[J[J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Screate_1simple(
+ JNIEnv *, jclass, jint, jlongArray, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Scopy
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Scopy(
+ JNIEnv *, jclass, jlong);
+
+#ifdef notdef
+// 10/28/99 -- added code to copy the array -- this is not used,
+// but serves as a reminder in case we try to implement this in
+// the future....
+/*
+ * Note: the argument coord is actually long coord[][], which has been
+ * flattened by the caller.
+ */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sselect_elements
+ * Signature: (JII[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sselect_1elements
+(JNIEnv *, jclass, jlong, jint, jint, jlongArray);
+#endif
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sselect_elements
+ * Signature: (JII[B)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sselect_1elements(
+ JNIEnv *, jclass, jlong, jint, jint, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sselect_all
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sselect_1all(
+ JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sselect_none
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sselect_1none(
+ JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sselect_valid
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Sselect_1valid(
+ JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_simple_extent_npoints
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1npoints(
+ JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_select_npoints
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Sget_1select_1npoints(
+ JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_select_type
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sget_1select_1type(
+ JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_simple_extent_ndims
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1ndims(
+ JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_simple_extent_dims
+ * Signature: (J[J[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1dims(
+ JNIEnv *, jclass, jlong, jlongArray, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_simple_extent_type
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sget_1simple_1extent_1type(
+ JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sset_extent_simple
+ * Signature: (JI[J[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sset_1extent_1simple(
+ JNIEnv *, jclass, jlong, jint, jlongArray, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sis_simple
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Sis_1simple(
+ JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Soffset_simple
+ * Signature: (J[B)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Soffset_1simple(
+ JNIEnv *, jclass, jlong, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sextent_copy
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sextent_1copy(
+ JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sextent_equal
+ * Signature: (JJ)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Sextent_1equal
+ (JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sset_extent_none
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sset_1extent_1none(
+ JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sselect_hyperslab
+ * Signature: (JI[J[J[J[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sselect_1hyperslab(
+ JNIEnv *, jclass, jlong, jint, jlongArray, jlongArray, jlongArray, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5__1H5Sclose(
+ JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_select_hyper_nblocks
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Sget_1select_1hyper_1nblocks(
+ JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_select_elem_npoints
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Sget_1select_1elem_1npoints(
+ JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_select_hyper_blocklist
+ * Signature: (JJJ[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sget_1select_1hyper_1blocklist(
+ JNIEnv *, jclass, jlong, jlong, jlong, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_select_elem_pointlist
+ * Signature: (JJJ[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sget_1select_1elem_1pointlist(
+ JNIEnv *, jclass, jlong, jlong, jlong, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_select_bounds
+ * Signature: (J[J[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Sget_1select_1bounds(
+ JNIEnv *, jclass, jlong, jlongArray, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sencode
+ * Signature: (J)[B
+ */
+JNIEXPORT jbyteArray JNICALL Java_hdf_hdf5lib_H5_H5Sencode
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sdecode
+ * Signature: ([B)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Sdecode
+ (JNIEnv *, jclass, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sis_regular_hyperslab
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Sis_1regular_1hyperslab
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Sget_regular_hyperslab
+ * Signature: (J[J[J[J[J)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Sget_1regular_1hyperslab(
+ JNIEnv *, jclass, jlong, jlongArray, jlongArray, jlongArray, jlongArray);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5S */
diff --git a/java/src/jni/h5tImp.c b/java/src/jni/h5tImp.c
new file mode 100644
index 0000000..1f587de
--- /dev/null
+++ b/java/src/jni/h5tImp.c
@@ -0,0 +1,1588 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ *     http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include <jni.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "hdf5.h"
+#include "h5jni.h"
+#include "h5tImp.h"
+
+extern JavaVM *jvm;
+extern jobject visit_callback;
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Topen2
+ * Signature: (JLjava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Topen2(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong access_plist)
+{
+ hid_t status;
+ const char *tName;
+
+ PIN_JAVA_STRING(name, tName, -1);
+
+ status = H5Topen2((hid_t)loc_id, tName, (hid_t)access_plist);
+
+ UNPIN_JAVA_STRING(name, tName);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jlong)status;
+} /* end Java_hdf_hdf5lib_H5__1H5Topen2 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tcommitted
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Tcommitted(JNIEnv *env, jclass clss, jlong type_id)
+{
+ htri_t bval = JNI_FALSE;
+
+ bval = H5Tcommitted(type_id);
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Tcommitted */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Tcreate
+ * Signature: (IJ)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Tcreate(JNIEnv *env, jclass clss, jint dclass, jlong size)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Tcreate((H5T_class_t )dclass, (size_t)size);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Tcreate */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tcopy
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Tcopy(JNIEnv *env, jclass clss, jlong type_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Tcopy((hid_t)type_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Tcopy */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tequal
+ * Signature: (JJ)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Tequal(JNIEnv *env, jclass clss, jlong type_id1, jlong type_id2)
+{
+ htri_t bval = JNI_FALSE;
+
+ bval = H5Tequal((hid_t)type_id1, (hid_t)type_id2);
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Tequal */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tlock
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tlock(JNIEnv *env, jclass clss, jlong type_id)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Tlock((hid_t)type_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tlock */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_class
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1class(JNIEnv *env, jclass clss, jlong type_id)
+{
+ H5T_class_t retVal = H5T_NO_CLASS;
+
+ retVal = H5Tget_class((hid_t)type_id);
+ if (retVal == H5T_NO_CLASS)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1class */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_size
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1size(JNIEnv *env, jclass clss, jlong type_id)
+{
+ size_t retVal = 0;
+
+ retVal = H5Tget_size((hid_t)type_id);
+ if (retVal == 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_size
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tset_1size(JNIEnv *env, jclass clss, jlong type_id, jlong size)
+{
+ size_t tsize = (size_t)size;
+ herr_t retVal = -1;
+
+ retVal = H5Tset_size((hid_t)type_id, tsize);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tset_1size */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_order
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1order(JNIEnv *env, jclass clss, jlong type_id)
+{
+ H5T_order_t retVal = H5T_ORDER_ERROR;
+
+ retVal = H5Tget_order((hid_t)type_id);
+ if (retVal == H5T_ORDER_ERROR)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1order */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_order
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tset_1order(JNIEnv *env, jclass clss, jlong type_id, jint order)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Tset_order((hid_t)type_id, (H5T_order_t)order);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tset_1order */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_precision
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1precision(JNIEnv *env, jclass clss, jlong type_id)
+{
+ size_t retVal = 0;
+
+ retVal = H5Tget_precision((hid_t)type_id);
+ if (retVal == 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1precision */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_precision_long
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1precision_1long(JNIEnv *env, jclass clss, jlong type_id)
+{
+ size_t retVal = 0;
+
+ retVal = H5Tget_precision((hid_t)type_id);
+ if (retVal == 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1precision_1long */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_precision
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tset_1precision(JNIEnv *env, jclass clss, jlong type_id, jlong precision)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Tset_precision((hid_t)type_id, (size_t)precision);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tset_1precision */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_offset
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1offset(JNIEnv *env, jclass clss, jlong type_id)
+{
+ int retVal = -1;
+
+ retVal = H5Tget_offset((hid_t)type_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1offset */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_offset
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tset_1offset(JNIEnv *env, jclass clss, jlong type_id, jlong offset)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Tset_offset((hid_t)type_id, (size_t)offset);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tset_1offset */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_pad
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1pad(JNIEnv *env, jclass clss, jlong type_id, jintArray pad)
+{
+ herr_t status = -1;
+ jboolean isCopy;
+ jint *P;
+
+ if (pad == NULL) {
+ h5nullArgument(env, "H5Tget_pad: pad is NULL");
+ } /* end if */
+ else {
+ P = ENVPTR->GetIntArrayElements(ENVPAR pad, &isCopy);
+ if (P == NULL) {
+ h5JNIFatalError(env, "H5Tget_pad: pad not pinned");
+ } /* end if */
+ else {
+ status = H5Tget_pad((hid_t)type_id, (H5T_pad_t *)&(P[0]), (H5T_pad_t *)&(P[1]));
+
+ if (status < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR pad, P, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseIntArrayElements(ENVPAR pad, P, 0);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1pad */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_pad
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tset_1pad(JNIEnv *env, jclass clss, jlong type_id, jint lsb, jint msb)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Tset_pad((hid_t)type_id, (H5T_pad_t)lsb, (H5T_pad_t)msb);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tset_1pad */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_sign
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1sign(JNIEnv *env, jclass clss, jlong type_id)
+{
+ H5T_sign_t retVal = H5T_SGN_ERROR;
+
+ retVal = H5Tget_sign((hid_t)type_id);
+ if (retVal == H5T_SGN_ERROR)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1sign */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_sign
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tset_1sign(JNIEnv *env, jclass clss, jlong type_id, jint sign)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Tset_sign((hid_t)type_id, (H5T_sign_t)sign);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tset_1sign */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_fields_int
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1fields_1int(JNIEnv *env, jclass clss, jlong type_id, jintArray fields)
+{
+    herr_t status = -1;
+ jboolean isCopy;
+ jint *P;
+
+ if (fields == NULL) {
+ h5nullArgument(env, "H5Tget_fields: fields is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR fields) < 5) {
+ h5badArgument(env, "H5Tget_fields: fields input array < order 5");
+ } /* end else if */
+ else {
+ P = ENVPTR->GetIntArrayElements(ENVPAR fields, &isCopy);
+ if (P == NULL) {
+ h5JNIFatalError(env, "H5Tget_fields: fields not pinned");
+ } /* end if */
+ else {
+ status = H5Tget_fields((hid_t)type_id, (size_t *)&(P[0]), (size_t *)&(P[1]), (size_t *)&(P[2]), (size_t *)&(P[3]), (size_t *)&(P[4]));
+
+ if (status < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR fields, P, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseIntArrayElements(ENVPAR fields, P, 0);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1fields_1int */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_fields
+ * Signature: (J[J)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1fields(JNIEnv *env, jclass clss, jlong type_id, jlongArray fields)
+{
+ herr_t status;
+ jboolean isCopy;
+ jlong *fieldsArray;
+
+ if (fields == NULL) {
+ h5nullArgument(env, "H5Tget_fields: fields is NULL");
+ } /* end if */
+ else if (ENVPTR->GetArrayLength(ENVPAR fields) < 5) {
+ h5badArgument(env, "H5Tget_fields: fields input array < order 5");
+ } /* end else if */
+ else {
+ fieldsArray = ENVPTR->GetLongArrayElements(ENVPAR fields, &isCopy);
+ if (fieldsArray == NULL) {
+ h5JNIFatalError(env, "H5Tget_fields: fields not pinned");
+ } /* end if */
+ else {
+ { /* direct cast (size_t *)variable fails on 32-bit environment */
+ size_t spos_t = 0;
+ size_t epos_t = 0;
+ size_t esize_t = 0;
+ size_t mpos_t = 0;
+ size_t msize_t = 0;
+ long long fields_temp = *(&fieldsArray[0]);
+ spos_t = (size_t)fields_temp;
+ fields_temp = *(&fieldsArray[1]);
+ epos_t = (size_t)fields_temp;
+ fields_temp = *(&fieldsArray[2]);
+ esize_t = (size_t)fields_temp;
+ fields_temp = *(&fieldsArray[3]);
+ mpos_t = (size_t)fields_temp;
+ fields_temp = *(&fieldsArray[4]);
+ msize_t = (size_t)fields_temp;
+
+ status = H5Tget_fields((hid_t)type_id, &spos_t, &epos_t, &esize_t, &mpos_t, &msize_t);
+
+ *(&fieldsArray[0]) = (jlong)spos_t;
+ *(&fieldsArray[1]) = (jlong)epos_t;
+ *(&fieldsArray[2]) = (jlong)esize_t;
+ *(&fieldsArray[3]) = (jlong)mpos_t;
+ *(&fieldsArray[4]) = (jlong)msize_t;
+ } /* end direct cast special handling */
+
+ if (status < 0) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR fields, fieldsArray, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseLongArrayElements(ENVPAR fields, fieldsArray, 0);
+ } /* end else */
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1fields */
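+
+/*
+ * Illustrative usage sketch (not part of this patch): the long[] overload above
+ * expects a 5-element array ordered {spos, epos, esize, mpos, msize}.  Public
+ * H5.H5Tcopy/H5.H5Tclose wrappers over the _H5Tcopy/_H5Tclose natives and the
+ * HDF5Constants name below are assumed from this binding.
+ *
+ *     long   ftype  = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_FLOAT);
+ *     long[] fields = new long[5];
+ *     H5.H5Tget_fields(ftype, fields);
+ *     H5.H5Tclose(ftype);
+ */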
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_fields
+ * Signature: (JJJJJJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Tset_1fields(JNIEnv *env, jclass clss, jlong type_id, jlong spos, jlong epos,
+ jlong esize, jlong mpos, jlong msize)
+{
+ if (H5Tset_fields((hid_t)type_id, (size_t)spos, (size_t)epos, (size_t)esize, (size_t)mpos, (size_t)msize) < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Tset_1fields */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_ebias
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1ebias(JNIEnv *env, jclass clss, jlong type_id)
+{
+ size_t retVal = 0;
+
+ retVal = H5Tget_ebias((hid_t)type_id);
+ if (retVal == 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1ebias */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_ebias_long
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1ebias_1long(JNIEnv *env, jclass clss, jlong type_id)
+{
+ size_t retVal = 0;
+
+ retVal = H5Tget_ebias((hid_t)type_id);
+ if (retVal == 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1ebias_1long */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_ebias
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tset_1ebias(JNIEnv *env, jclass clss, jlong type_id, jlong ebias)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Tset_ebias((hid_t)type_id, (size_t)ebias);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tset_1ebias */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_norm
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1norm(JNIEnv *env, jclass clss, jlong type_id)
+{
+ H5T_norm_t retVal = H5T_NORM_ERROR;
+
+ retVal = H5Tget_norm((hid_t)type_id);
+ if (retVal == H5T_NORM_ERROR)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1norm */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_norm
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tset_1norm(JNIEnv *env, jclass clss, jlong type_id, jint norm)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Tset_norm((hid_t)type_id, (H5T_norm_t )norm);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tset_1norm */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_inpad
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1inpad(JNIEnv *env, jclass clss, jlong type_id)
+{
+ H5T_pad_t retVal = H5T_PAD_ERROR;
+
+ retVal = H5Tget_inpad((hid_t)type_id );
+ if (retVal == H5T_PAD_ERROR)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1inpad */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_inpad
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tset_1inpad(JNIEnv *env, jclass clss, jlong type_id, jint inpad)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Tset_inpad((hid_t)type_id, (H5T_pad_t) inpad);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tset_1inpad */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_cset
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1cset(JNIEnv *env, jclass clss, jlong type_id)
+{
+ H5T_cset_t retVal = H5T_CSET_ERROR;
+
+ retVal = H5Tget_cset((hid_t)type_id);
+ if (retVal == H5T_CSET_ERROR)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1cset */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_cset
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tset_1cset(JNIEnv *env, jclass clss, jlong type_id, jint cset)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Tset_cset((hid_t)type_id, (H5T_cset_t)cset);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tset_1cset */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_strpad
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1strpad(JNIEnv *env, jclass clss, jlong type_id)
+{
+ H5T_str_t retVal = H5T_STR_ERROR;
+
+ retVal = H5Tget_strpad((hid_t)type_id);
+ if (retVal == H5T_STR_ERROR)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1strpad */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_strpad
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tset_1strpad(JNIEnv *env, jclass clss, jlong type_id, jint strpad)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Tset_strpad((hid_t)type_id, (H5T_str_t)strpad);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tset_1strpad */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_nmembers
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1nmembers(JNIEnv *env, jclass clss, jlong type_id)
+{
+ int retVal = -1;
+
+ retVal = H5Tget_nmembers((hid_t)type_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1nmembers */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_member_name
+ * Signature: (JI)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1member_1name(JNIEnv *env, jclass clss, jlong type_id, jint field_idx)
+{
+ char *name;
+ jstring str = NULL;
+
+ name = H5Tget_member_name((hid_t)type_id, (unsigned)field_idx);
+ if (name != NULL) {
+ str = ENVPTR->NewStringUTF(ENVPAR name);
+ H5free_memory(name);
+
+ if (str == NULL)
+ h5JNIFatalError(env, "H5Tget_member_name: returned string not created");
+ } /* end if */
+
+ return str;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1member_1name */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_member_index
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1member_1index(JNIEnv *env, jclass clss, jlong type_id, jstring field_name)
+{
+ const char *tName;
+ int index = -1;
+
+ PIN_JAVA_STRING(field_name, tName, -1);
+
+ index = H5Tget_member_index((hid_t)type_id, tName);
+
+ UNPIN_JAVA_STRING(field_name, tName);
+
+ if (index < 0)
+ h5libraryError(env);
+
+ return (jint)index;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1member_1index */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_member_type
+ * Signature: (JI)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Tget_1member_1type(JNIEnv *env, jclass clss, jlong type_id, jint field_idx)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Tget_member_type((hid_t)type_id, (unsigned)field_idx);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Tget_1member_1type */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_member_offset
+ * Signature: (JI)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1member_1offset(JNIEnv *env, jclass clss, jlong type_id, jint memno)
+{
+ return (jlong)H5Tget_member_offset((hid_t)type_id, (unsigned)memno);
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1member_1offset */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_member_class
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1member_1class(JNIEnv *env, jclass clss, jlong type_id, jint memno)
+{
+ int retVal = -1;
+
+ retVal = H5Tget_member_class((hid_t)type_id, (unsigned)memno);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1member_1class */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tinsert
+ * Signature: (JLjava/lang/String;JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tinsert(JNIEnv *env, jclass clss, jlong type_id, jstring name, jlong offset, jlong field_id)
+{
+ herr_t status;
+ const char *tName;
+ long off = (long)offset;
+
+ PIN_JAVA_STRING(name, tName, -1);
+
+ status = H5Tinsert((hid_t)type_id, tName, (size_t)off, field_id);
+
+ UNPIN_JAVA_STRING(name,tName);
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Tinsert */
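+
+/*
+ * Illustrative usage sketch (not part of this patch): building a 16-byte packed
+ * compound type with the wrappers in this file.  A public H5.H5Tcreate wrapper
+ * over the _H5Tcreate native and the HDF5Constants names below are assumed
+ * from this binding; offsets match an 8-byte integer followed by an 8-byte
+ * double.
+ *
+ *     long ctype = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, 16);
+ *     H5.H5Tinsert(ctype, "serial", 0, HDF5Constants.H5T_NATIVE_INT64);
+ *     H5.H5Tinsert(ctype, "value",  8, HDF5Constants.H5T_NATIVE_DOUBLE);
+ */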
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tpack
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tpack(JNIEnv *env, jclass clss, jlong type_id)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Tpack((hid_t)type_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Tpack */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Tclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5__1H5Tclose(JNIEnv *env, jclass clss, jlong type_id)
+{
+ herr_t retVal = -1;
+
+ retVal = H5Tclose((hid_t)type_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Tclose */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Tvlen_create
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Tvlen_1create(JNIEnv *env, jclass clss, jlong base_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Tvlen_create((hid_t)base_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Tvlen_1create */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_tag
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tset_1tag(JNIEnv *env, jclass clss, jlong type, jstring tag)
+{
+ herr_t status = -1;
+ const char *tagP;
+
+ PIN_JAVA_STRING(tag, tagP, -1);
+
+ status = H5Tset_tag((hid_t)type, tagP);
+
+ UNPIN_JAVA_STRING(tag,tagP);
+
+ if (status < 0)
+ h5libraryError(env);
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Tset_1tag */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_tag
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1tag(JNIEnv *env, jclass clss, jlong type)
+{
+ jstring str = NULL;
+ char *tag;
+
+ tag = H5Tget_tag((hid_t)type);
+ if (tag != NULL) {
+ str = ENVPTR->NewStringUTF(ENVPAR tag);
+ H5free_memory(tag);
+
+ if (str == NULL)
+ h5JNIFatalError(env, "H5Tget_tag: returned string not created");
+ } /* end if */
+
+ return str;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1tag */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_super
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Tget_1super(JNIEnv *env, jclass clss, jlong type_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Tget_super((hid_t)type_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Tget_1super */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Tenum_create
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Tenum_1create(JNIEnv *env, jclass clss, jlong base_id)
+{
+ hid_t retVal = -1;
+
+ retVal = H5Tenum_create((hid_t)base_id);
+ if (retVal < 0)
+ h5libraryError(env);
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Tenum_1create */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tenum_insert_int
+ * Signature: (JLjava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tenum_1insert_1int(JNIEnv *env, jclass clss, jlong type_id, jstring name, jintArray value)
+{
+ herr_t status = -1;
+ jint *intP;
+ const char *nameP;
+ jboolean isCopy2;
+
+ if (value == NULL ) {
+ h5nullArgument(env, "H5Tenum_insert: value is NULL");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(name, nameP, -1);
+
+ intP = ENVPTR->GetIntArrayElements(ENVPAR value, &isCopy2);
+ if (intP == NULL) {
+ UNPIN_JAVA_STRING(name, nameP);
+ h5JNIFatalError(env, "H5Tenum_insert: value not pinned");
+ return -1;
+ } /* end if */
+ else {
+ status = H5Tenum_insert((hid_t)type_id, nameP, intP);
+
+ UNPIN_JAVA_STRING(name, nameP);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR value, intP, JNI_ABORT);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Tenum_1insert_1int */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tenum_insert
+ * Signature: (JLjava/lang/String;[B)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Tenum_1insert(JNIEnv *env, jclass clss, jlong type_id, jstring name, jbyteArray value)
+{
+ herr_t status = -1;
+ jbyte *byteP;
+ const char *nameP;
+ jboolean isCopy2;
+
+ if (value == NULL) {
+ h5nullArgument(env, "H5Tenum_insert: value is NULL");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING0(name, nameP);
+
+ byteP = ENVPTR->GetByteArrayElements(ENVPAR value, &isCopy2);
+ if (byteP == NULL) {
+ UNPIN_JAVA_STRING(name, nameP);
+ h5JNIFatalError(env, "H5Tenum_insert: value not pinned");
+ } /* end if */
+ else {
+ status = H5Tenum_insert((hid_t)type_id, nameP, byteP);
+
+ UNPIN_JAVA_STRING(name, nameP);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, JNI_ABORT);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Tenum_1insert */
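+
+/*
+ * Illustrative usage sketch (not part of this patch): creating an int-valued
+ * enumeration.  A public H5.H5Tenum_create wrapper over the _H5Tenum_create
+ * native, an int[] overload of H5.H5Tenum_insert backed by H5Tenum_insert_int
+ * above, and the HDF5Constants name below are assumed from this binding.
+ *
+ *     long etype = H5.H5Tenum_create(HDF5Constants.H5T_NATIVE_INT);
+ *     H5.H5Tenum_insert(etype, "RED",   new int[]{0});
+ *     H5.H5Tenum_insert(etype, "GREEN", new int[]{1});
+ */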
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tenum_nameof_int
+ * Signature: (J[I[Ljava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tenum_1nameof_1int(JNIEnv *env, jclass clss, jlong type_id, jintArray value, jobjectArray name,
+ jint size)
+{
+ herr_t status = -1;
+ jint *intP;
+ char *nameP;
+ jboolean isCopy;
+ jstring str;
+
+ if (size <= 0) {
+        h5badArgument(env, "H5Tenum_nameof: name size <= 0");
+ } /* end if */
+ else if (value == NULL) {
+ h5nullArgument(env, "H5Tenum_nameof: value is NULL");
+ } /* end if */
+ else {
+ nameP = (char*)HDmalloc(sizeof(char) * (size_t)size);
+ if (nameP == NULL) {
+ /* exception -- out of memory */
+ h5outOfMemory(env, "H5Tenum_nameof: malloc name size");
+ } /* end if */
+ else {
+ intP = ENVPTR->GetIntArrayElements(ENVPAR value, &isCopy);
+ if (intP == NULL) {
+ HDfree(nameP);
+ h5JNIFatalError(env, "H5Tenum_nameof: value not pinned");
+ } /* end if */
+ else {
+ status = H5Tenum_nameof((hid_t)type_id, intP, nameP, (size_t)size);
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR value, intP, JNI_ABORT);
+
+ if (status < 0) {
+ HDfree(nameP);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR nameP);
+ HDfree(nameP);
+ if (str == NULL) {
+ h5JNIFatalError(env, "H5Tenum_nameof: return array not created");
+ } /* end if */
+ else
+ ENVPTR->SetObjectArrayElement(ENVPAR name, 0, (jobject)str);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Tenum_1nameof_1int */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tenum_nameof
+ * Signature: (J[BJ)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL
+Java_hdf_hdf5lib_H5_H5Tenum_1nameof(JNIEnv *env, jclass clss, jlong type_id, jbyteArray value, jlong size)
+{
+ herr_t status = -1;
+ jbyte *byteP;
+ char *nameP;
+ jboolean isCopy;
+ jstring str = NULL;
+
+ if (size <= 0) {
+        h5badArgument(env, "H5Tenum_nameof: name size <= 0");
+ } /* end if */
+ else if (value == NULL ) {
+ h5nullArgument(env, "H5Tenum_nameof: value is NULL");
+ } /* end if */
+ else {
+ nameP = (char*)HDmalloc(sizeof(char) * (size_t)size);
+ if (nameP == NULL) {
+ h5outOfMemory(env, "H5Tenum_nameof: malloc name size");
+ } /* end if */
+ else {
+ byteP = ENVPTR->GetByteArrayElements(ENVPAR value, &isCopy);
+ if (byteP == NULL) {
+ HDfree(nameP);
+ h5JNIFatalError(env, "H5Tenum_nameof: value not pinned");
+ } /* end if */
+ else {
+ status = H5Tenum_nameof((hid_t)type_id, byteP, nameP, (size_t)size);
+
+ /* free the buffer without copying back */
+ ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, JNI_ABORT);
+
+ if (status < 0) {
+ HDfree(nameP);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ str = ENVPTR->NewStringUTF(ENVPAR nameP);
+ HDfree(nameP);
+ if (str == NULL) {
+ h5JNIFatalError(env, "H5Tenum_nameof: return array not created");
+ } /* end if */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return str;
+} /* end Java_hdf_hdf5lib_H5_H5Tenum_1nameof */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tenum_valueof_int
+ * Signature: (JLjava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tenum_1valueof_1int(JNIEnv *env, jclass clss, jlong type_id, jstring name, jintArray value)
+{
+ herr_t status = -1;
+ jint *intP;
+ const char *nameP;
+ jboolean isCopy2;
+
+ if (value == NULL) {
+ h5nullArgument(env, "H5Tenum_valueof: value is NULL");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING(name, nameP, -1);
+
+ intP = ENVPTR->GetIntArrayElements(ENVPAR value, &isCopy2);
+ if (intP == NULL) {
+ UNPIN_JAVA_STRING(name, nameP);
+ h5JNIFatalError(env, "H5Tenum_valueof: value not pinned");
+ } /* end if */
+ else {
+ status = H5Tenum_valueof((hid_t)type_id, nameP, intP);
+
+ UNPIN_JAVA_STRING(name, nameP);
+
+ if (status < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR value, intP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseIntArrayElements(ENVPAR value, intP, 0);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Tenum_1valueof_1int */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tenum_valueof
+ * Signature: (JLjava/lang/String;[B)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Tenum_1valueof(JNIEnv *env, jclass clss, jlong type_id, jstring name, jbyteArray value)
+{
+ herr_t status = -1;
+ jbyte *byteP;
+ const char *nameP;
+ jboolean isCopy2;
+
+ if (value == NULL) {
+ h5nullArgument(env, "H5Tenum_valueof: value is NULL");
+ } /* end if */
+ else {
+ PIN_JAVA_STRING0(name, nameP);
+
+ byteP = ENVPTR->GetByteArrayElements(ENVPAR value, &isCopy2);
+ if (byteP == NULL) {
+            UNPIN_JAVA_STRING(name, nameP);
+ h5JNIFatalError(env, "H5Tenum_valueof: value not pinned");
+ } /* end if */
+ else {
+ status = H5Tenum_valueof((hid_t)type_id, nameP, byteP);
+
+ UNPIN_JAVA_STRING(name, nameP);
+
+ if (status < 0) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, 0);
+ } /* end else */
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Tenum_1valueof */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_member_value_int
+ * Signature: (JI[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1member_1value_1int(JNIEnv *env, jclass clss, jlong type_id, jint membno, jintArray value)
+{
+ herr_t status = -1;
+ jint *intP;
+ jboolean isCopy;
+
+ if (value == NULL) {
+ h5nullArgument(env, "H5Tget_member_value: value is NULL");
+ } /* end if */
+ else {
+ intP = ENVPTR->GetIntArrayElements(ENVPAR value, &isCopy);
+ if (intP == NULL) {
+ h5JNIFatalError(env, "H5Tget_member_value: value not pinned");
+ } /* end if */
+ else {
+ status = H5Tget_member_value((hid_t)type_id, (unsigned)membno, intP);
+
+ if (status < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR value, intP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseIntArrayElements(ENVPAR value, intP, 0);
+ } /* end else */
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1member_1value_1int */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_member_value
+ * Signature: (JI[B)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1member_1value(JNIEnv *env, jclass clss, jlong type_id, jint membno, jbyteArray value)
+{
+ herr_t status = -1;
+ jbyte *byteP;
+ jboolean isCopy;
+
+    if (value == NULL) {
+        h5nullArgument(env, "H5Tget_member_value: value is NULL");
+    } /* end if */
+    else {
+        byteP = ENVPTR->GetByteArrayElements(ENVPAR value, &isCopy);
+ if (byteP == NULL) {
+ h5JNIFatalError(env, "H5Tget_member_value: value not pinned");
+ } /* end if */
+ else {
+ status = H5Tget_member_value((hid_t)type_id, (unsigned)membno, byteP);
+
+ if (status < 0) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else
+ ENVPTR->ReleaseByteArrayElements(ENVPAR value, byteP, 0);
+ } /* end else */
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1member_1value */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method:    H5Tget_array_ndims
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1array_1ndims(JNIEnv *env, jclass clss, jlong type_id)
+{
+ int ndims = -1;
+
+ ndims = H5Tget_array_ndims((hid_t)type_id);
+ if (ndims < 0)
+ h5libraryError(env);
+
+ return (jint)ndims;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1array_1ndims */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method:    H5Tget_array_dims
+ * Signature: (J[I[I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1array_1dims(JNIEnv *env, jclass clss, jlong type_id, jintArray dims, jintArray perms)
+{
+ int ndims = -1;
+ int dlen;
+ int i;
+ jint *dimsP;
+ hsize_t *cdims = NULL;
+ jboolean isCopy;
+
+    if (dims == NULL) {
+        h5nullArgument(env, "H5Tget_array_dims: dims is NULL");
+    } /* end if */
+ else {
+ dimsP = ENVPTR->GetIntArrayElements(ENVPAR dims, &isCopy);
+ if (dimsP == NULL) {
+ h5JNIFatalError(env, "H5Tget_array_dims: dimsP not pinned");
+ } /* end if */
+ else {
+ dlen = ENVPTR->GetArrayLength(ENVPAR dims);
+ cdims = (hsize_t*)HDmalloc((size_t)dlen * sizeof(hsize_t));
+
+ ndims = H5Tget_array_dims2((hid_t)type_id, cdims);
+
+            if (ndims < 0) {
+                if (cdims)
+                    HDfree(cdims);
+                ENVPTR->ReleaseIntArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+                h5libraryError(env);
+            } /* end if */
+ else {
+ for (i = 0; i < dlen; i++) {
+ dimsP[i] = (jint) cdims[i];
+ } /* end for */
+ ENVPTR->ReleaseIntArrayElements(ENVPAR dims, dimsP, 0);
+
+ if (cdims) HDfree(cdims);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)ndims;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1array_1dims */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tis_variable_str
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Tis_1variable_1str(JNIEnv *env, jclass clss, jlong type_id)
+{
+ htri_t bval = JNI_FALSE;
+
+ bval = H5Tis_variable_str((hid_t)type_id);
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Tis_1variable_1str */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_native_type
+ * Signature: (JI)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Tget_1native_1type(JNIEnv *env, jclass clss, jlong type_id, jint direction)
+{
+ hid_t native_tid = -1;
+
+ native_tid = H5Tget_native_type((hid_t)type_id, (H5T_direction_t)direction);
+
+ if (native_tid < 0)
+ h5libraryError(env);
+
+ return (jlong)native_tid;
+} /* end Java_hdf_hdf5lib_H5__1H5Tget_1native_1type */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tdetect_class
+ * Signature: (JI)Z
+ */
+JNIEXPORT jboolean JNICALL
+Java_hdf_hdf5lib_H5_H5Tdetect_1class(JNIEnv *env, jclass clss, jlong type_id, jint dtype_class)
+{
+ htri_t bval = JNI_FALSE;
+
+ bval = H5Tdetect_class((hid_t)type_id, (H5T_class_t)dtype_class);
+ if (bval > 0)
+ bval = JNI_TRUE;
+ else if (bval < 0)
+ h5libraryError(env);
+
+ return (jboolean)bval;
+} /* end Java_hdf_hdf5lib_H5_H5Tdetect_1class */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tcommit
+ * Signature: (JLjava/lang/String;JJJJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Tcommit(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong type,
+ jlong link_plist_id, jlong create_plist_id, jlong access_plist_id)
+{
+ herr_t status = -1;
+ const char *tName;
+
+ PIN_JAVA_STRING0(name, tName);
+
+ status = H5Tcommit2((hid_t)loc_id, tName, (hid_t)type, (hid_t)link_plist_id, (hid_t)create_plist_id, (hid_t)access_plist_id);
+
+ UNPIN_JAVA_STRING(name, tName);
+
+ if (status < 0)
+ h5libraryError(env);
+} /* end Java_hdf_hdf5lib_H5_H5Tcommit */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Tarray_create2
+ * Signature: (JI[J)J
+ */
+JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_H5__1H5Tarray_1create2(JNIEnv *env, jclass clss, jlong base_id, jint rank, jlongArray dims)
+{
+ hid_t retVal = -1;
+ jlong *dimsP;
+ int dlen;
+ hsize_t *cdims = NULL;
+ jboolean isCopy;
+ int i;
+
+ if (rank <= 0) {
+ h5badArgument(env, "H5Tarray_create: rank is < 1");
+ } /* end if */
+ else if (dims == NULL) {
+ h5nullArgument(env, "H5Tarray_create: dims is NULL");
+ } /* end else if */
+ else {
+ dimsP = ENVPTR->GetLongArrayElements(ENVPAR dims, &isCopy);
+ if (dimsP == NULL) {
+ h5JNIFatalError( env, "H5Tarray_create: dimsP not pinned");
+ } /* end if */
+ else {
+ dlen = ENVPTR->GetArrayLength(ENVPAR dims);
+ if (dlen != rank) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ } /* end if */
+ else {
+ cdims = (hsize_t*)HDmalloc((size_t)dlen * sizeof(hsize_t));
+ for (i = 0; i < dlen; i++) {
+ cdims[i] = (hsize_t)dimsP[i];
+ } /* end for */
+
+ retVal = H5Tarray_create2((hid_t)base_id, (unsigned)rank, (const hsize_t*)cdims);
+
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, 0);
+
+ HDfree(cdims);
+ if (retVal < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jlong)retVal;
+} /* end Java_hdf_hdf5lib_H5__1H5Tarray_1create2 */
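+
+/*
+ * Usage sketch (illustrative only; atype and adims are placeholder
+ * identifiers): creating the 2x3 array datatype that the wrapper above
+ * builds from a Java long[] of dimensions.
+ *
+ *     hsize_t adims[2] = {2, 3};
+ *     hid_t   atype    = H5Tarray_create2(H5T_NATIVE_INT, 2, adims);
+ *
+ *     ... use atype as a dataset element type or compound member type ...
+ *
+ *     H5Tclose(atype);
+ */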
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method:    H5Tget_array_dims2
+ * Signature: (J[J)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Tget_1array_1dims2(JNIEnv *env, jclass clss, jlong type_id, jlongArray dims)
+{
+ int ndims = -1;
+ int dlen;
+ int i;
+ jlong *dimsP;
+ hsize_t *cdims=NULL;
+ jboolean isCopy;
+
+    if (dims == NULL) {
+        h5nullArgument(env, "H5Tget_array_dims: dims is NULL");
+    } /* end if */
+    else {
+        dimsP = ENVPTR->GetLongArrayElements(ENVPAR dims, &isCopy);
+        if (dimsP == NULL) {
+            h5JNIFatalError(env, "H5Tget_array_dims: dimsP not pinned");
+ } /* end if */
+ else {
+ dlen = ENVPTR->GetArrayLength(ENVPAR dims);
+ cdims = (hsize_t*)HDmalloc((size_t)dlen * sizeof(hsize_t));
+
+ ndims = H5Tget_array_dims2((hid_t)type_id, (hsize_t*)cdims);
+
+ if (ndims < 0) {
+ if (cdims)
+                    HDfree(cdims);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, JNI_ABORT);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ for (i = 0; i < dlen; i++) {
+ dimsP[i] = (jlong) cdims[i];
+ } /* end for */
+ ENVPTR->ReleaseLongArrayElements(ENVPAR dims, dimsP, 0);
+
+ if (cdims)
+ HDfree(cdims);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)ndims;
+} /* end Java_hdf_hdf5lib_H5_H5Tget_1array_1dims2 */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tconvert
+ * Signature: (JJJ[B[BJ)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5Tconvert(JNIEnv *env, jclass clss, jlong src_id, jlong dst_id, jlong nelmts,
+ jbyteArray buf, jbyteArray background, jlong plist_id)
+{
+ herr_t status;
+ jbyte *bufP;
+ jbyte *bgP = NULL;
+ jboolean isCopy;
+
+    if (nelmts <= 0) {
+        h5badArgument(env, "H5Tconvert: nelmts <= 0");
+    } /* end if */
+    else if (buf == NULL) {
+        h5nullArgument(env, "H5Tconvert: buf is NULL");
+    } /* end else if */
+    else {
+ bufP = ENVPTR->GetByteArrayElements(ENVPAR buf, &isCopy);
+ if (bufP == NULL) {
+ h5JNIFatalError(env, "H5Tconvert: value not pinned");
+ } /* end if */
+ else {
+ if (background)
+ bgP = ENVPTR->GetByteArrayElements(ENVPAR background, &isCopy);
+
+ status = H5Tconvert((hid_t)src_id, (hid_t)dst_id, (size_t)nelmts, (void *)bufP, (void *)bgP, (hid_t)plist_id) ;
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR buf, bufP, 0);
+
+ if (bgP)
+ ENVPTR->ReleaseByteArrayElements(ENVPAR background, bgP, 0);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5Tconvert */
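+
+/*
+ * Usage sketch (illustrative only; buf is a placeholder identifier):
+ * H5Tconvert converts in place, so the buffer must be sized for the larger
+ * of the source and destination element sizes -- the Java byte[] passed to
+ * the wrapper above has the same requirement.
+ *
+ *     double buf[10];
+ *     ... pack 10 int values into the first 10*sizeof(int) bytes of buf ...
+ *     H5Tconvert(H5T_NATIVE_INT, H5T_NATIVE_DOUBLE, 10, buf, NULL, H5P_DEFAULT);
+ *     buf[0..9] now hold the converted doubles
+ */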
+
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
diff --git a/java/src/jni/h5tImp.h b/java/src/jni/h5tImp.h
new file mode 100644
index 0000000..9755a91
--- /dev/null
+++ b/java/src/jni/h5tImp.h
@@ -0,0 +1,550 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5T */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5T
+#define _Included_hdf_hdf5lib_H5_H5T
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Topen2
+ * Signature: (JLjava/lang/String;J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Topen2
+ (JNIEnv *, jclass, jlong, jstring, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tcommitted
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Tcommitted
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Tcreate
+ * Signature: (IJ)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Tcreate
+ (JNIEnv *, jclass, jint, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Tcopy
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Tcopy
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tequal
+ * Signature: (JJ)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Tequal
+ (JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tlock
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tlock
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_class
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1class
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_size
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Tget_1size
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_size
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tset_1size
+ (JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_order
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1order
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_order
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tset_1order
+ (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_precision
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1precision
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_precision_long
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Tget_1precision_1long
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_precision
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tset_1precision
+ (JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_offset
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1offset
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_offset
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tset_1offset
+ (JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_pad
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1pad
+ (JNIEnv *, jclass, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_pad
+ * Signature: (JII)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tset_1pad
+ (JNIEnv *, jclass, jlong, jint, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_sign
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1sign
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_sign
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tset_1sign
+ (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_fields_int
+ * Signature: (J[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1fields_1int
+ (JNIEnv *, jclass, jlong, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_fields
+ * Signature: (J[J)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Tget_1fields
+ (JNIEnv *, jclass, jlong, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_fields
+ * Signature: (JJJJJJ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Tset_1fields
+ (JNIEnv *, jclass, jlong, jlong, jlong, jlong, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_ebias
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1ebias
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_ebias_long
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Tget_1ebias_1long
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_ebias
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tset_1ebias
+ (JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_norm
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1norm
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_norm
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tset_1norm
+ (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_inpad
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1inpad
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_inpad
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tset_1inpad
+ (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_cset
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1cset
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_cset
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tset_1cset
+ (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_strpad
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1strpad
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_strpad
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tset_1strpad
+ (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_nmembers
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1nmembers
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_member_name
+ * Signature: (JI)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_hdf_hdf5lib_H5_H5Tget_1member_1name
+ (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_member_index
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1member_1index
+ (JNIEnv *, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_member_type
+ * Signature: (JI)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Tget_1member_1type
+ (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_member_offset
+ * Signature: (JI)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5_H5Tget_1member_1offset
+ (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_member_class
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1member_1class
+ (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tinsert
+ * Signature: (JLjava/lang/String;JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tinsert
+ (JNIEnv *, jclass, jlong, jstring, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tpack
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tpack
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Tclose
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5__1H5Tclose
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Tvlen_create
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Tvlen_1create
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tset_tag
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tset_1tag
+ (JNIEnv *, jclass, jlong, jstring);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_tag
+ * Signature: (J)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_hdf_hdf5lib_H5_H5Tget_1tag
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_super
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Tget_1super
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Tenum_create
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Tenum_1create
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tenum_insert_int
+ * Signature: (JLjava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tenum_1insert_1int
+ (JNIEnv *, jclass, jlong, jstring, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tenum_insert
+ * Signature: (JLjava/lang/String;[B)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Tenum_1insert
+ (JNIEnv *, jclass, jlong, jstring, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tenum_nameof_int
+ * Signature: (J[I[Ljava/lang/String;I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tenum_1nameof_1int
+ (JNIEnv *, jclass, jlong, jintArray, jobjectArray, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tenum_nameof
+ * Signature: (J[BJ)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_hdf_hdf5lib_H5_H5Tenum_1nameof
+ (JNIEnv *, jclass, jlong, jbyteArray, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tenum_valueof_int
+ * Signature: (JLjava/lang/String;[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tenum_1valueof_1int
+ (JNIEnv *, jclass, jlong, jstring, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tenum_valueof
+ * Signature: (JLjava/lang/String;[B)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Tenum_1valueof
+ (JNIEnv *, jclass, jlong, jstring, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_member_value_int
+ * Signature: (JI[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1member_1value_1int
+ (JNIEnv *, jclass, jlong, jint, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_member_value
+ * Signature: (JI[B)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Tget_1member_1value
+ (JNIEnv *, jclass, jlong, jint, jbyteArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_array_ndims
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1array_1ndims
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_array_dims
+ * Signature: (J[I[I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1array_1dims
+ (JNIEnv *, jclass, jlong, jintArray, jintArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tis_variable_str
+ * Signature: (J)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Tis_1variable_1str
+ (JNIEnv *, jclass, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_native_type
+ * Signature: (JI)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Tget_1native_1type
+ (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tdetect_class
+ * Signature: (JI)Z
+ */
+JNIEXPORT jboolean JNICALL Java_hdf_hdf5lib_H5_H5Tdetect_1class
+ (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tcommit
+ * Signature: (JLjava/lang/String;JJJJ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Tcommit
+ (JNIEnv *, jclass, jlong, jstring, jlong, jlong, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: _H5Tarray_create2
+ * Signature: (JI[J)J
+ */
+JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_H5__1H5Tarray_1create2
+ (JNIEnv *, jclass, jlong, jint, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tget_array_dims2
+ * Signature: (J[J)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Tget_1array_1dims2
+ (JNIEnv *, jclass, jlong, jlongArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Tconvert
+ * Signature: (JJJ[B[BJ)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5Tconvert
+ (JNIEnv *, jclass, jlong, jlong, jlong, jbyteArray, jbyteArray, jlong);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5T */
diff --git a/java/src/jni/h5util.c b/java/src/jni/h5util.c
new file mode 100644
index 0000000..90c8ad2
--- /dev/null
+++ b/java/src/jni/h5util.c
@@ -0,0 +1,2592 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ *    http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include "hdf5.h"
+#include "h5util.h"
+
+/* size of hyperslab buffer when a dataset is bigger than H5TOOLS_MALLOCSIZE */
+hsize_t H5TOOLS_BUFSIZE = (32 * 1024 * 1024); /* 32 MB */
+int H5TOOLS_TEXT_BLOCK = 16; /* Number of elements on a line in a text export file */
+
+JavaVM *jvm;
+jobject visit_callback;
+jobject copy_callback;
+jobject close_callback;
+jobject create_callback;
+jobject compare_callback;
+jobject get_callback;
+jobject set_callback;
+jobject delete_callback;
+
+/********************/
+/* Local Prototypes */
+/********************/
+
+static int h5str_dump_region_blocks(h5str_t *str, hid_t region, hid_t region_obj);
+static int h5str_dump_region_points(h5str_t *str, hid_t region, hid_t region_obj);
+static int h5str_is_zero(const void *_mem, size_t size);
+static hid_t h5str_get_native_type(hid_t type);
+static hid_t h5str_get_little_endian_type(hid_t type);
+static hid_t h5str_get_big_endian_type(hid_t type);
+static htri_t h5str_detect_vlen(hid_t tid);
+static htri_t h5str_detect_vlen_str(hid_t tid);
+static int h5tools_dump_simple_data(FILE *stream, hid_t container, hid_t type, void *_mem, hsize_t nelmts);
+static int h5str_render_bin_output(FILE *stream, hid_t container, hid_t tid, void *_mem, hsize_t block_nelmts);
+static int render_bin_output_region_data_blocks(FILE *stream, hid_t region_id,
+ hid_t container, int ndims, hid_t type_id, hssize_t nblocks, hsize_t *ptdata);
+static int render_bin_output_region_blocks(FILE *stream, hid_t region_space,
+ hid_t region_id, hid_t container);
+static int render_bin_output_region_data_points(FILE *stream, hid_t region_space, hid_t region_id,
+ hid_t container, int ndims, hid_t type_id, hssize_t npoints, hsize_t *ptdata);
+static int render_bin_output_region_points(FILE *stream, hid_t region_space,
+ hid_t region_id, hid_t container);
+
+/** frees memory held by array of strings */
+void
+h5str_array_free(char **strs, size_t len) {
+ size_t i;
+
+ if (!strs || len <= 0)
+ return;
+
+ for (i = 0; i < len; i++) {
+ if (*(strs + i))
+ HDfree(*(strs + i));
+    } /* end for */
+ HDfree(strs);
+} /* end h5str_array_free */
+
+/** allocate a new str with given length */
+void
+h5str_new(h5str_t *str, size_t len) {
+ if (str && len > 0) {
+ str->s = (char *)HDmalloc(len);
+ str->max = len;
+ str->s[0] = '\0';
+ } /* end if */
+} /* end h5str_new */
+
+/** free string memory */
+void
+h5str_free(h5str_t *str) {
+ if (str && str->max > 0) {
+ HDfree(str->s);
+ HDmemset(str, 0, sizeof(h5str_t));
+ } /* end if */
+} /* end h5str_free */
+
+/** reset the max size of the string */
+void
+h5str_resize(h5str_t *str, size_t new_len) {
+ char *new_str;
+
+ if (!str || new_len <= 0 || str->max == new_len)
+ return;
+
+ new_str = (char *)HDmalloc(new_len);
+ if (new_len > str->max) /* increase memory */
+ HDstrcpy(new_str, str->s);
+    else {
+        HDstrncpy(new_str, str->s, new_len - 1);
+        new_str[new_len - 1] = '\0'; /* HDstrncpy does not terminate on truncation */
+    } /* end else */
+
+ HDfree(str->s);
+ str->s = new_str;
+ str->max = new_len;
+} /* end h5str_resize */
+
+/* appends a copy of the string pointed to by cstr to the h5str.
+ Return Value:
+ the char string pointed to by str->s
+ */
+char*
+h5str_append(h5str_t *str, const char* cstr) {
+ size_t len;
+
+ if (!str)
+ return NULL;
+ else if (!cstr)
+ return str->s;
+
+ len = HDstrlen(str->s) + HDstrlen(cstr);
+ while (len >= str->max) /* not enough to hold the new string, double the space */
+ {
+ h5str_resize(str, str->max * 2);
+ }
+
+ return HDstrcat(str->s, cstr);
+} /* end h5str_append */
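+
+/*
+ * Usage sketch (illustrative only; str is a placeholder identifier) for the
+ * h5str_t helpers above: the buffer is grown automatically on append.
+ *
+ *     h5str_t str;
+ *
+ *     h5str_new(&str, 64);
+ *     h5str_append(&str, "dims=");
+ *     h5str_append(&str, "(10, 20)");
+ *     printf("%s\n", str.s);       prints "dims=(10, 20)"
+ *     h5str_free(&str);
+ */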
+
+/** print value of a data point into string.
+ Return Value:
+ On success, the total number of characters in str->s is returned.
+ On failure, 0 is returned (the return type is size_t, so no negative value is possible).
+ */
+size_t
+h5str_sprintf(h5str_t *str, hid_t container, hid_t tid, void *ptr, int expand_data) {
+ unsigned char tmp_uchar = 0;
+ char tmp_char = 0;
+ unsigned short tmp_ushort = 0;
+ short tmp_short = 0;
+ unsigned int tmp_uint = 0;
+ int tmp_int = 0;
+ unsigned long tmp_ulong = 0;
+ long tmp_long = 0;
+ unsigned long long tmp_ullong = 0;
+ long long tmp_llong = 0;
+ float tmp_float = 0.0;
+ double tmp_double = 0.0;
+ long double tmp_ldouble = 0.0;
+ static char fmt_llong[8], fmt_ullong[8];
+
+ hid_t mtid = -1;
+ size_t offset;
+ size_t nll;
+ char *this_str;
+ size_t this_strlen;
+ int n;
+ int len;
+ hvl_t *vlptr;
+ char *cptr = (char*) ptr;
+ unsigned char *ucptr = (unsigned char*) ptr;
+ H5T_class_t tclass = H5Tget_class(tid);
+ size_t size = H5Tget_size(tid);
+ H5T_sign_t nsign = H5Tget_sign(tid);
+ int bdata_print = 0;
+
+ if (!str || !ptr)
+ return 0;
+
+ /* Build default formats for long long types */
+ if (!fmt_llong[0]) {
+ sprintf(fmt_llong, "%%%sd", H5_PRINTF_LL_WIDTH);
+ sprintf(fmt_ullong, "%%%su", H5_PRINTF_LL_WIDTH);
+ } /* end if */
+
+ this_str = NULL;
+ this_strlen = 0;
+
+ switch (tclass) {
+ case H5T_FLOAT:
+ if (sizeof(float) == size) {
+ /* if (H5Tequal(tid, H5T_NATIVE_FLOAT)) */
+ HDmemcpy(&tmp_float, ptr, sizeof(float));
+ this_str = (char*)HDmalloc(25);
+ sprintf(this_str, "%g", tmp_float);
+ }
+ else if (sizeof(double) == size) {
+ /* if (H5Tequal(tid, H5T_NATIVE_DOUBLE)) */
+ HDmemcpy(&tmp_double, ptr, sizeof(double));
+ this_str = (char*)HDmalloc(25);
+ sprintf(this_str, "%g", tmp_double);
+ }
+#if H5_SIZEOF_LONG_DOUBLE !=0
+ else if (sizeof(long double) == size) {
+ /* if (H5Tequal(tid, H5T_NATIVE_LDOUBLE)) */
+ HDmemcpy(&tmp_ldouble, ptr, sizeof(long double));
+ this_str = (char*)HDmalloc(27);
+ sprintf(this_str, "%Lf", tmp_ldouble);
+ }
+#endif
+ break;
+ case H5T_STRING:
+ {
+ char *tmp_str;
+ size = 0;
+
+ if (H5Tis_variable_str(tid)) {
+ tmp_str = *(char**) ptr;
+ if (tmp_str != NULL)
+ size = HDstrlen(tmp_str);
+ }
+ else {
+ tmp_str = cptr;
+ }
+
+ /* Check for NULL pointer for string */
+ if (tmp_str == NULL) {
+                this_str = (char *)HDmalloc(5);
+                HDstrcpy(this_str, "NULL");
+            }
+            else {
+                if (size > 0) {
+                    this_str = (char *)HDmalloc(size + 1);
+                    HDstrncpy(this_str, tmp_str, size);
+                    this_str[size] = '\0'; /* HDstrncpy does not add the terminator here */
+                }
+ }
+ }
+ break;
+ case H5T_INTEGER:
+ if (sizeof(char) == size) {
+ if(H5T_SGN_NONE == nsign) {
+ /* if (H5Tequal(tid, H5T_NATIVE_UCHAR)) */
+ HDmemcpy(&tmp_uchar, ptr, sizeof(unsigned char));
+ this_str = (char*)HDmalloc(7);
+ sprintf(this_str, "%u", tmp_uchar);
+ }
+ else {
+ /* if (H5Tequal(tid, H5T_NATIVE_SCHAR)) */
+ HDmemcpy(&tmp_char, ptr, sizeof(char));
+ this_str = (char*)HDmalloc(7);
+ sprintf(this_str, "%hhd", tmp_char);
+ }
+ }
+ else if (sizeof(int) == size) {
+ if(H5T_SGN_NONE == nsign) {
+ /* if (H5Tequal(tid, H5T_NATIVE_UINT)) */
+ HDmemcpy(&tmp_uint, ptr, sizeof(unsigned int));
+ this_str = (char*)HDmalloc(14);
+ sprintf(this_str, "%u", tmp_uint);
+ }
+ else {
+ /* if (H5Tequal(tid, H5T_NATIVE_INT)) */
+ HDmemcpy(&tmp_int, ptr, sizeof(int));
+ this_str = (char*)HDmalloc(14);
+ sprintf(this_str, "%d", tmp_int);
+ }
+ }
+ else if (sizeof(short) == size) {
+ if(H5T_SGN_NONE == nsign) {
+ /* if (H5Tequal(tid, H5T_NATIVE_USHORT)) */
+ HDmemcpy(&tmp_ushort, ptr, sizeof(unsigned short));
+ this_str = (char*)HDmalloc(9);
+ sprintf(this_str, "%u", tmp_ushort);
+ }
+ else {
+ /* if (H5Tequal(tid, H5T_NATIVE_SHORT)) */
+ HDmemcpy(&tmp_short, ptr, sizeof(short));
+ this_str = (char*)HDmalloc(9);
+ sprintf(this_str, "%d", tmp_short);
+ }
+ }
+ else if (sizeof(long) == size) {
+ if(H5T_SGN_NONE == nsign) {
+ /* if (H5Tequal(tid, H5T_NATIVE_ULONG)) */
+ HDmemcpy(&tmp_ulong, ptr, sizeof(unsigned long));
+ this_str = (char*)HDmalloc(23);
+ sprintf(this_str, "%lu", tmp_ulong);
+ }
+ else {
+ /* if (H5Tequal(tid, H5T_NATIVE_LONG)) */
+ HDmemcpy(&tmp_long, ptr, sizeof(long));
+ this_str = (char*)HDmalloc(23);
+ sprintf(this_str, "%ld", tmp_long);
+ }
+ }
+ else if (sizeof(long long) == size) {
+ if(H5T_SGN_NONE == nsign) {
+ /* if (H5Tequal(tid, H5T_NATIVE_ULLONG)) */
+ HDmemcpy(&tmp_ullong, ptr, sizeof(unsigned long long));
+ this_str = (char*)HDmalloc(25);
+ sprintf(this_str, fmt_ullong, tmp_ullong);
+ }
+ else {
+ /* if (H5Tequal(tid, H5T_NATIVE_LLONG)) */
+ HDmemcpy(&tmp_llong, ptr, sizeof(long long));
+ this_str = (char*)HDmalloc(25);
+ sprintf(this_str, fmt_llong, tmp_llong);
+ }
+ }
+ break;
+ case H5T_COMPOUND:
+ {
+ unsigned i;
+ n = H5Tget_nmembers(tid);
+ h5str_append(str, " {");
+
+ for (i = 0; i < n; i++) {
+ offset = H5Tget_member_offset(tid, i);
+ mtid = H5Tget_member_type(tid, i);
+ h5str_sprintf(str, container, mtid, cptr + offset, expand_data);
+ if (i < n - 1)
+ h5str_append(str, ", ");
+ H5Tclose(mtid);
+ }
+ h5str_append(str, "} ");
+ }
+ break;
+ case H5T_ENUM:
+ {
+ char enum_name[1024];
+ if (H5Tenum_nameof(tid, ptr, enum_name, sizeof enum_name) >= 0) {
+ h5str_append(str, enum_name);
+ }
+ else {
+ size_t i;
+ nll = H5Tget_size(tid);
+ this_str = (char*)HDmalloc(4 * (nll + 1));
+
+ if (1 == nll) {
+ sprintf(this_str, "0x%02x", ucptr[0]);
+ }
+                else {
+                    this_str[0] = '\0';
+                    for (i = 0; i < nll; i++)
+                        sprintf(this_str + HDstrlen(this_str), "%s%02x", i ? ":" : "", ucptr[i]);
+                }
+ }
+ }
+ break;
+ case H5T_REFERENCE:
+ if (h5str_is_zero(ptr, size)) {
+ h5str_append(str, "NULL");
+ }
+ else {
+ if (H5R_DSET_REG_REF_BUF_SIZE == size) {
+ /* if (H5Tequal(tid, H5T_STD_REF_DSETREG)) */
+ /*
+ * Dataset region reference --
+ * show the type and the referenced object
+ */
+ char ref_name[1024];
+ hid_t region_obj;
+ hid_t region;
+ H5S_sel_type region_type;
+
+ /* get name of the dataset the region reference points to using H5Rget_name */
+ region_obj = H5Rdereference2(container, H5P_DEFAULT, H5R_DATASET_REGION, ptr);
+ if (region_obj >= 0) {
+ region = H5Rget_region(container, H5R_DATASET_REGION, ptr);
+ if (region >= 0) {
+ if(expand_data) {
+ region_type = H5Sget_select_type(region);
+ if(region_type==H5S_SEL_POINTS) {
+ h5str_dump_region_points_data(str, region, region_obj);
+ }
+ else {
+ h5str_dump_region_blocks_data(str, region, region_obj);
+ }
+ }
+ else {
+ if(H5Rget_name(region_obj, H5R_DATASET_REGION, ptr, (char*)ref_name, 1024) >= 0) {
+ h5str_append(str, ref_name);
+ }
+
+ region_type = H5Sget_select_type(region);
+
+ if(region_type==H5S_SEL_POINTS) {
+ h5str_append(str, " REGION_TYPE POINT");
+ h5str_dump_region_points(str, region, region_obj);
+ }
+ else {
+ h5str_append(str, " REGION_TYPE BLOCK");
+ h5str_dump_region_blocks(str, region, region_obj);
+ }
+ }
+
+ H5Sclose(region);
+ }
+ H5Dclose(region_obj);
+ }
+ }
+ else if (H5R_OBJ_REF_BUF_SIZE == size) {
+ /* if (H5Tequal(tid, H5T_STD_REF_OBJ)) */
+ /*
+ * Object references -- show the type and OID of the referenced
+ * object.
+ */
+ H5O_info_t oi;
+ hid_t obj;
+
+ this_str = (char*)HDmalloc(64);
+ obj = H5Rdereference2(container, H5P_DEFAULT, H5R_OBJECT, ptr);
+ H5Oget_info(obj, &oi);
+
+ /* Print object data and close object */
+ sprintf(this_str, "%u-%lu", (unsigned) oi.type, oi.addr);
+ H5Oclose(obj);
+ }
+ }
+ break;
+ case H5T_ARRAY:
+ {
+ int rank = 0;
+ hsize_t i, dims[H5S_MAX_RANK], total_elmts;
+ h5str_append(str, "[ ");
+
+ mtid = H5Tget_super(tid);
+ size = H5Tget_size(mtid);
+ rank = H5Tget_array_ndims(tid);
+
+ H5Tget_array_dims2(tid, dims);
+
+ total_elmts = 1;
+ for (i = 0; i < rank; i++)
+ total_elmts *= dims[i];
+
+ for (i = 0; i < total_elmts; i++) {
+ h5str_sprintf(str, container, mtid, cptr + i * size, expand_data);
+ if (i < total_elmts - 1)
+ h5str_append(str, ", ");
+ }
+ H5Tclose(mtid);
+ h5str_append(str, "] ");
+ }
+ break;
+ case H5T_VLEN:
+ {
+ unsigned int i;
+ mtid = H5Tget_super(tid);
+ size = H5Tget_size(mtid);
+
+ vlptr = (hvl_t *) cptr;
+
+ nll = vlptr->len;
+ for (i = 0; i < (int)nll; i++) {
+ h5str_sprintf(str, container, mtid, ((char *) (vlptr->p)) + i * size, expand_data);
+ if (i < (int)nll - 1)
+ h5str_append(str, ", ");
+ }
+ H5Tclose(mtid);
+ }
+ break;
+
+ default:
+ {
+ /* All other types get printed as hexadecimal */
+ size_t i;
+ nll = H5Tget_size(tid);
+ this_str = (char*)HDmalloc(4 * (nll + 1));
+
+ if (1 == nll) {
+ sprintf(this_str, "0x%02x", ucptr[0]);
+ }
+            else {
+                this_str[0] = '\0';
+                for (i = 0; i < nll; i++)
+                    sprintf(this_str + HDstrlen(this_str), "%s%02x", i ? ":" : "", ucptr[i]);
+            }
+ }
+ break;
+ } /* end switch */
+
+ if (this_str) {
+ h5str_append(str, this_str);
+ this_strlen = HDstrlen(str->s);
+ HDfree(this_str);
+ } /* end if */
+
+ return this_strlen;
+} /* end h5str_sprintf */
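+
+/*
+ * Usage sketch (illustrative only; str and ival are placeholder
+ * identifiers): formatting a single native int with h5str_sprintf.  The
+ * container id is consulted only for reference data, so -1 is acceptable
+ * when printing plain atomic values.
+ *
+ *     h5str_t str;
+ *     int     ival = 42;
+ *
+ *     h5str_new(&str, 32);
+ *     h5str_sprintf(&str, -1, H5T_NATIVE_INT, &ival, 0);
+ *     printf("%s\n", str.s);       prints "42"
+ *     h5str_free(&str);
+ */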
+
+/*-------------------------------------------------------------------------
+ * Purpose: Print the data values from a dataset referenced by region blocks.
+ *
+ * Description:
+ * This is a special case subfunction to print the data in a region reference of type blocks.
+ *
+ * Return:
+ *        The function returns FAIL if there was an error, otherwise SUCCEED
+ *-------------------------------------------------------------------------
+ */
+static int
+h5str_print_region_data_blocks(hid_t region_id,
+ h5str_t *str, int ndims, hid_t type_id, hssize_t nblocks, hsize_t *ptdata)
+{
+ hsize_t *dims1 = NULL;
+ hsize_t *start = NULL;
+ hsize_t *count = NULL;
+ hsize_t blkndx;
+ hsize_t total_size[H5S_MAX_RANK];
+ unsigned int region_flags; /* buffer extent flags */
+ hsize_t numelem;
+ hsize_t numindex;
+ size_t jndx;
+ unsigned indx;
+ size_t type_size;
+ int ret_value = SUCCEED;
+ hid_t mem_space = -1;
+ hid_t sid1 = -1;
+ void *region_buf = NULL;
+
+ /* Get the dataspace of the dataset */
+ if((sid1 = H5Dget_space(region_id)) >= 0) {
+
+ /* Allocate space for the dimension array */
+ if((dims1 = (hsize_t *)HDmalloc(sizeof(hsize_t) * (size_t)ndims)) != NULL) {
+
+ /* find the dimensions of each data space from the block coordinates */
+ numelem = 1;
+ for (jndx = 0; jndx < ndims; jndx++) {
+ dims1[jndx] = ptdata[jndx + (size_t)ndims] - ptdata[jndx] + 1;
+ numelem = dims1[jndx] * numelem;
+ } /* end for */
+
+ /* Create dataspace for reading buffer */
+ if((mem_space = H5Screate_simple(ndims, dims1, NULL)) >= 0) {
+ if((type_size = H5Tget_size(type_id)) > 0) {
+ if((region_buf = HDmalloc(type_size * (size_t)numelem)) != NULL) {
+ /* Select (x , x , ..., x ) x (y , y , ..., y ) hyperslab for reading memory dataset */
+ /* 1 2 n 1 2 n */
+ if((start = (hsize_t *)HDmalloc(sizeof(hsize_t) * (size_t)ndims)) != NULL) {
+ if((count = (hsize_t *)HDmalloc(sizeof(hsize_t) * (size_t)ndims)) != NULL) {
+ for (blkndx = 0; blkndx < nblocks; blkndx++) {
+ for (indx = 0; indx < ndims; indx++) {
+ start[indx] = ptdata[indx + blkndx * (hsize_t)ndims * 2];
+ count[indx] = dims1[indx];
+ } /* end for */
+
+ if(H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL) >= 0) {
+ if(H5Dread(region_id, type_id, mem_space, sid1, H5P_DEFAULT, region_buf) >= 0) {
+ if(H5Sget_simple_extent_dims(mem_space, total_size, NULL) >= 0) {
+ for (numindex = 0; numindex < numelem; numindex++) {
+ h5str_sprintf(str, region_id, type_id, ((char*)region_buf + numindex * type_size), 1);
+
+ if (numindex + 1 < numelem)
+ h5str_append(str, ", ");
+ } /* end for (jndx = 0; jndx < numelem; jndx++, region_elmtno++, ctx.cur_elmt++) */
+ } /* end if(H5Sget_simple_extent_dims(mem_space, total_size, NULL) >= 0) */
+ } /* end if(H5Dread(region_id, type_id, mem_space, sid1, H5P_DEFAULT, region_buf) >= 0) */
+ } /* end if(H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL) >= 0) */
+ } /* end for (blkndx = 0; blkndx < nblocks; blkndx++) */
+
+ HDfree(count);
+ } /* end if((count = (hsize_t *) HDmalloc(sizeof(hsize_t) * ndims)) != NULL) */
+ else
+ ret_value = -1;
+
+ HDfree(start);
+ } /* end if((start = (hsize_t *) HDmalloc(sizeof(hsize_t) * ndims)) != NULL) */
+ else
+ ret_value = -1;
+
+ HDfree(region_buf);
+ } /* end if((region_buf = HDmalloc(type_size * (size_t)numelem)) != NULL) */
+ else
+ ret_value = -1;
+ } /* end if((type_size = H5Tget_size(type_id)) > 0) */
+ else
+ ret_value = -1;
+
+ if(H5Sclose(mem_space) < 0)
+ ret_value = -1;
+ } /* end if((mem_space = H5Screate_simple(ndims, dims1, NULL)) >= 0) */
+ else
+ ret_value = -1;
+
+ HDfree(dims1);
+ } /* end if((dims1 = (hsize_t *) HDmalloc(sizeof(hsize_t) * ndims)) != NULL) */
+ else
+ ret_value = -1;
+
+ if(H5Sclose(sid1) < 0)
+ ret_value = -1;
+ } /* end if((sid1 = H5Dget_space(region_id)) >= 0) */
+ else
+ ret_value = -1;
+
+ return ret_value;
+} /* end h5str_print_region_data_blocks */
+
+int
+h5str_dump_region_blocks_data(h5str_t *str, hid_t region, hid_t region_id)
+{
+ int ret_value = 0;
+ hssize_t nblocks;
+ hsize_t alloc_size;
+ hsize_t *ptdata;
+ hid_t dtype = -1;
+ hid_t type_id = -1;
+ char tmp_str[256];
+ int ndims = H5Sget_simple_extent_ndims(region);
+
+ /*
+ * This function fails if the region does not have blocks.
+ */
+ H5E_BEGIN_TRY {
+ nblocks = H5Sget_select_hyper_nblocks(region);
+ } H5E_END_TRY;
+
+ /* Print block information */
+ if (nblocks > 0) {
+ int i;
+
+ alloc_size = (hsize_t)nblocks * (hsize_t)ndims * 2 * (hsize_t)sizeof(ptdata[0]);
+ if (alloc_size == (hsize_t)((size_t) alloc_size)) {
+ ptdata = (hsize_t *)HDmalloc((size_t) alloc_size);
+ H5Sget_select_hyper_blocklist(region, (hsize_t) 0,
+ (hsize_t) nblocks, ptdata);
+
+
+ if((dtype = H5Dget_type(region_id)) >= 0) {
+ if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) {
+
+ h5str_print_region_data_blocks(region_id, str, ndims, type_id, nblocks, ptdata);
+
+ if(H5Tclose(type_id) < 0)
+ ret_value = -1;
+ } /* end if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) */
+ else
+ ret_value = -1;
+
+ if(H5Tclose(dtype) < 0)
+ ret_value = -1;
+ } /* end if((dtype = H5Dget_type(region_id)) >= 0) */
+ else
+ ret_value = -1;
+ HDfree(ptdata);
+ } /* if (alloc_size == (hsize_t)((size_t)alloc_size)) */
+ } /* if (nblocks > 0) */
+
+ return ret_value;
+} /* end h5str_dump_region_blocks_data */
+
+static int
+h5str_dump_region_blocks(h5str_t *str, hid_t region, hid_t region_id)
+{
+ int ret_value = 0;
+ hssize_t nblocks;
+ hsize_t alloc_size;
+ hsize_t *ptdata;
+ hid_t dtype = -1;
+ hid_t type_id = -1;
+ char tmp_str[256];
+ int ndims = H5Sget_simple_extent_ndims(region);
+
+ /*
+ * This function fails if the region does not have blocks.
+ */
+ H5E_BEGIN_TRY {
+ nblocks = H5Sget_select_hyper_nblocks(region);
+ } H5E_END_TRY;
+
+ /* Print block information */
+ if (nblocks > 0) {
+ int i;
+
+ alloc_size = (hsize_t)nblocks * (hsize_t)ndims * 2 * (hsize_t)sizeof(ptdata[0]);
+ if (alloc_size == (hsize_t)((size_t) alloc_size)) {
+ ptdata = (hsize_t *)HDmalloc((size_t) alloc_size);
+ H5Sget_select_hyper_blocklist(region, (hsize_t) 0,
+ (hsize_t) nblocks, ptdata);
+
+ h5str_append(str, " {");
+ for (i = 0; i < nblocks; i++) {
+ int j;
+
+ h5str_append(str, " ");
+
+ /* Start coordinates and opposite corner */
+ for (j = 0; j < ndims; j++) {
+ tmp_str[0] = '\0';
+ sprintf(tmp_str, "%s%lu", j ? "," : "(",
+ (unsigned long) ptdata[i * 2 * ndims + j]);
+ h5str_append(str, tmp_str);
+ }
+
+ for (j = 0; j < ndims; j++) {
+ tmp_str[0] = '\0';
+ sprintf(tmp_str, "%s%lu", j ? "," : ")-(",
+ (unsigned long) ptdata[i * 2 * ndims + j + ndims]);
+ h5str_append(str, tmp_str);
+ }
+ h5str_append(str, ") ");
+ tmp_str[0] = '\0';
+ }
+ h5str_append(str, " }");
+
+ HDfree(ptdata);
+ } /* if (alloc_size == (hsize_t)((size_t)alloc_size)) */
+ } /* if (nblocks > 0) */
+
+ return ret_value;
+} /* end h5str_dump_region_blocks */
+
+/*-------------------------------------------------------------------------
+ * Purpose: Print the data values from a dataset referenced by region points.
+ *
+ * Description:
+ * This is a special case subfunction to print the data in a region reference of type points.
+ *
+ * Return:
+ * The function returns FAIL on error, otherwise SUCCEED
+ *-------------------------------------------------------------------------
+ */
+static int
+h5str_print_region_data_points(hid_t region_space, hid_t region_id,
+ h5str_t *str, int ndims, hid_t type_id, hssize_t npoints, hsize_t *ptdata)
+{
+ hsize_t *dims1 = NULL;
+ hsize_t total_size[H5S_MAX_RANK];
+ size_t jndx;
+ unsigned indx;
+ size_t type_size;
+ int ret_value = SUCCEED;
+ unsigned int region_flags; /* buffer extent flags */
+ hid_t mem_space = -1;
+ void *region_buf = NULL;
+ char tmp_str[256];
+
+ /* Allocate space for the dimension array */
+ if((dims1 = (hsize_t *)HDmalloc(sizeof(hsize_t) * (size_t)ndims)) != NULL) {
+
+ dims1[0] = (hsize_t)npoints;
+
+ /* Create dataspace for reading buffer */
+ if((mem_space = H5Screate_simple(1, dims1, NULL)) >= 0) {
+
+ if((type_size = H5Tget_size(type_id)) > 0) {
+
+ if((region_buf = HDmalloc(type_size * (size_t)npoints)) != NULL) {
+
+ if(H5Dread(region_id, type_id, mem_space, region_space, H5P_DEFAULT, region_buf) >= 0) {
+
+ for (jndx = 0; jndx < npoints; jndx++) {
+ if(H5Sget_simple_extent_dims(mem_space, total_size, NULL) >= 0) {
+
+ h5str_sprintf(str, region_id, type_id, ((char*)region_buf + jndx * type_size), 1);
+
+ if (jndx + 1 < npoints)
+ h5str_append(str, ", ");
+
+ } /* end if(H5Sget_simple_extent_dims(mem_space, total_size, NULL) >= 0) */
+ } /* end for (jndx = 0; jndx < npoints; jndx++, elmtno++) */
+ } /* end if(H5Dread(region_id, type_id, mem_space, region_space, H5P_DEFAULT, region_buf) >= 0) */
+ else
+ ret_value = -1;
+
+ HDfree(region_buf);
+ } /* end if((region_buf = HDmalloc(type_size * (size_t)npoints)) != NULL) */
+ else
+ ret_value = -1;
+ } /* end if((type_size = H5Tget_size(type_id)) > 0) */
+ else
+ ret_value = -1;
+
+ if(H5Sclose(mem_space) < 0)
+ ret_value = -1;
+ } /* end if((mem_space = H5Screate_simple(1, dims1, NULL)) >= 0) */
+ else
+ ret_value = -1;
+ HDfree(dims1);
+ } /* end if((dims1 = (hsize_t *) HDmalloc(sizeof(hsize_t) * ndims)) != NULL) */
+ else
+ ret_value = -1;
+
+ return ret_value;
+} /* end h5str_print_region_data_points */
+
+int
+h5str_dump_region_points_data(h5str_t *str, hid_t region, hid_t region_id)
+{
+ int ret_value = 0;
+ hssize_t npoints;
+ hsize_t alloc_size;
+ hsize_t *ptdata;
+ char tmp_str[256];
+ hid_t dtype = -1;
+ hid_t type_id = -1;
+ int ndims = H5Sget_simple_extent_ndims(region);
+
+ /*
+ * This function fails if the region does not have points.
+ */
+ H5E_BEGIN_TRY {
+ npoints = H5Sget_select_elem_npoints(region);
+ } H5E_END_TRY;
+
+ /* Print point information */
+ if (npoints > 0) {
+ int i;
+
+ alloc_size = (hsize_t)npoints * (hsize_t)ndims * (hsize_t)sizeof(ptdata[0]);
+ if (alloc_size == (hsize_t)((size_t) alloc_size)) {
+ ptdata = (hsize_t *)HDmalloc((size_t) alloc_size);
+ H5Sget_select_elem_pointlist(region, (hsize_t) 0,
+ (hsize_t) npoints, ptdata);
+
+ if((dtype = H5Dget_type(region_id)) >= 0) {
+ if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) {
+
+ h5str_print_region_data_points(region, region_id,
+ str, ndims, type_id, npoints, ptdata);
+
+ if(H5Tclose(type_id) < 0)
+ ret_value = -1;
+ } /* end if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) */
+ else
+ ret_value = -1;
+
+ if(H5Tclose(dtype) < 0)
+ ret_value = -1;
+ } /* end if((dtype = H5Dget_type(region_id)) >= 0) */
+ else
+ ret_value = -1;
+ HDfree(ptdata);
+ }
+ }
+
+ return ret_value;
+} /* end h5str_dump_region_points_data */
+
+static int
+h5str_dump_region_points(h5str_t *str, hid_t region, hid_t region_id)
+{
+ int ret_value = 0;
+ hssize_t npoints;
+ hsize_t alloc_size;
+ hsize_t *ptdata;
+ char tmp_str[256];
+ hid_t dtype = -1;
+ hid_t type_id = -1;
+ int ndims = H5Sget_simple_extent_ndims(region);
+
+ /*
+ * This function fails if the region does not have points.
+ */
+ H5E_BEGIN_TRY {
+ npoints = H5Sget_select_elem_npoints(region);
+ } H5E_END_TRY;
+
+ /* Print point information */
+ if (npoints > 0) {
+ int i;
+
+ alloc_size = (hsize_t)npoints * (hsize_t)ndims * (hsize_t)sizeof(ptdata[0]);
+ if (alloc_size == (hsize_t)((size_t) alloc_size)) {
+ ptdata = (hsize_t *)HDmalloc((size_t) alloc_size);
+ H5Sget_select_elem_pointlist(region, (hsize_t) 0,
+ (hsize_t) npoints, ptdata);
+
+ h5str_append(str, " {");
+ for (i = 0; i < npoints; i++) {
+ int j;
+
+ h5str_append(str, " ");
+
+ for (j = 0; j < ndims; j++) {
+ tmp_str[0] = '\0';
+ sprintf(tmp_str, "%s%lu", j ? "," : "(",
+ (unsigned long) (ptdata[i * ndims + j]));
+ h5str_append(str, tmp_str);
+ } /* end for (j = 0; j < ndims; j++) */
+
+ h5str_append(str, ") ");
+ } /* end for (i = 0; i < npoints; i++) */
+ h5str_append(str, " }");
+
+ HDfree(ptdata);
+ } /* end if (alloc_size == (hsize_t)((size_t) alloc_size)) */
+ } /* end if (npoints > 0) */
+
+ return ret_value;
+} /* end h5str_dump_region_points */
+
+static int
+h5str_is_zero(const void *_mem, size_t size) {
+ const unsigned char *mem = (const unsigned char *) _mem;
+
+ while (size-- > 0)
+ if (mem[size])
+ return 0;
+
+ return 1;
+} /* end h5str_is_zero */
+
+/*-------------------------------------------------------------------------
+ * Function: h5str_detect_vlen_str
+ *
+ * Purpose: Recursive check for variable length string of a datatype.
+ *
+ * Return:
+ *        TRUE  : type contains any variable length string
+ *        FALSE : type doesn't contain any variable length string
+ *        Negative value: error occurred
+ *
+ *-------------------------------------------------------------------------
+ */
+static htri_t
+h5str_detect_vlen_str(hid_t tid)
+{
+ H5T_class_t tclass = H5T_NO_CLASS;
+ htri_t ret = 0;
+
+ ret = H5Tis_variable_str(tid);
+ if((ret == 1) || (ret < 0))
+ goto done;
+
+ tclass = H5Tget_class(tid);
+ if(tclass == H5T_ARRAY || tclass == H5T_VLEN) {
+ hid_t btid = H5Tget_super(tid);
+
+ if(btid < 0) {
+ ret = (htri_t)btid;
+ goto done;
+ } /* end if */
+ ret = h5str_detect_vlen_str(btid);
+ if((ret == 1) || (ret < 0)) {
+ H5Tclose(btid);
+ goto done;
+ } /* end if */
+ } /* end if */
+ else if(tclass == H5T_COMPOUND) {
+ unsigned i = 0;
+ int n = H5Tget_nmembers(tid);
+
+ if(n < 0) {
+            ret = (htri_t)n;
+ goto done;
+ } /* end if */
+
+ for(i = 0; i < n; i++) {
+ hid_t mtid = H5Tget_member_type(tid, i);
+
+ ret = h5str_detect_vlen_str(mtid);
+ if((ret == 1) || (ret < 0)) {
+ H5Tclose(mtid);
+ goto done;
+ }
+ H5Tclose(mtid);
+ } /* end for */
+ } /* end else */
+
+done:
+ return ret;
+} /* end h5str_detect_vlen_str */
+
+/*-------------------------------------------------------------------------
+ * Function: h5str_get_native_type
+ *
+ * Purpose: Wrapper around H5Tget_native_type() to work around
+ *              problems with bitfields.
+ *
+ * Return: Success: datatype ID
+ * Failure: FAIL
+ *-------------------------------------------------------------------------
+ */
+static hid_t
+h5str_get_native_type(hid_t type)
+{
+ hid_t p_type;
+ H5T_class_t type_class;
+
+ type_class = H5Tget_class(type);
+ if(type_class==H5T_BITFIELD)
+ p_type=H5Tcopy(type);
+ else
+ p_type = H5Tget_native_type(type,H5T_DIR_DEFAULT);
+
+ return(p_type);
+} /* end h5str_get_native_type */
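+
+/*
+ * Usage sketch (illustrative only; did, ftype, mtype and buf are placeholder
+ * identifiers): the static helper above picks a memory datatype for a read,
+ * falling back to a plain copy of the file type for bitfields.
+ *
+ *     hid_t ftype = H5Dget_type(did);
+ *     hid_t mtype = h5str_get_native_type(ftype);
+ *
+ *     H5Dread(did, mtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);
+ *
+ *     H5Tclose(mtype);
+ *     H5Tclose(ftype);
+ */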
+
+
+/*-------------------------------------------------------------------------
+ * Function: h5str_get_little_endian_type
+ *
+ * Purpose: Get a little endian type from a file type
+ *
+ * Return: Success: datatype ID
+ * Failure: FAIL
+ *-------------------------------------------------------------------------
+ */
+static hid_t
+h5str_get_little_endian_type(hid_t tid)
+{
+ hid_t p_type=-1;
+ H5T_class_t type_class;
+ size_t size;
+ H5T_sign_t sign;
+
+ type_class = H5Tget_class(tid);
+ size = H5Tget_size(tid);
+ sign = H5Tget_sign(tid);
+
+ switch( type_class )
+ {
+ case H5T_INTEGER:
+ {
+ if ( size == 1 && sign == H5T_SGN_2)
+ p_type=H5Tcopy(H5T_STD_I8LE);
+ else if ( size == 2 && sign == H5T_SGN_2)
+ p_type=H5Tcopy(H5T_STD_I16LE);
+ else if ( size == 4 && sign == H5T_SGN_2)
+ p_type=H5Tcopy(H5T_STD_I32LE);
+ else if ( size == 8 && sign == H5T_SGN_2)
+ p_type=H5Tcopy(H5T_STD_I64LE);
+ else if ( size == 1 && sign == H5T_SGN_NONE)
+ p_type=H5Tcopy(H5T_STD_U8LE);
+ else if ( size == 2 && sign == H5T_SGN_NONE)
+ p_type=H5Tcopy(H5T_STD_U16LE);
+ else if ( size == 4 && sign == H5T_SGN_NONE)
+ p_type=H5Tcopy(H5T_STD_U32LE);
+ else if ( size == 8 && sign == H5T_SGN_NONE)
+ p_type=H5Tcopy(H5T_STD_U64LE);
+ }
+ break;
+
+ case H5T_FLOAT:
+ if ( size == 4)
+ p_type=H5Tcopy(H5T_IEEE_F32LE);
+ else if ( size == 8)
+ p_type=H5Tcopy(H5T_IEEE_F64LE);
+ break;
+
+ case H5T_TIME:
+ case H5T_BITFIELD:
+ case H5T_OPAQUE:
+ case H5T_STRING:
+ case H5T_COMPOUND:
+ case H5T_REFERENCE:
+ case H5T_ENUM:
+ case H5T_VLEN:
+ case H5T_ARRAY:
+ break;
+
+ default:
+ break;
+
+ }
+
+ return(p_type);
+} /* end h5str_get_little_endian_type */
+
+/*-------------------------------------------------------------------------
+ * Function: h5str_get_big_endian_type
+ *
+ * Purpose: Get a big endian type from a file type
+ *
+ * Return: Success: datatype ID
+ * Failure: FAIL
+ *-------------------------------------------------------------------------
+ */
+static hid_t
+h5str_get_big_endian_type(hid_t tid)
+{
+ hid_t p_type=-1;
+ H5T_class_t type_class;
+ size_t size;
+ H5T_sign_t sign;
+
+ type_class = H5Tget_class(tid);
+ size = H5Tget_size(tid);
+ sign = H5Tget_sign(tid);
+
+ switch( type_class )
+ {
+ case H5T_INTEGER:
+ {
+ if ( size == 1 && sign == H5T_SGN_2)
+ p_type=H5Tcopy(H5T_STD_I8BE);
+ else if ( size == 2 && sign == H5T_SGN_2)
+ p_type=H5Tcopy(H5T_STD_I16BE);
+ else if ( size == 4 && sign == H5T_SGN_2)
+ p_type=H5Tcopy(H5T_STD_I32BE);
+ else if ( size == 8 && sign == H5T_SGN_2)
+ p_type=H5Tcopy(H5T_STD_I64BE);
+ else if ( size == 1 && sign == H5T_SGN_NONE)
+ p_type=H5Tcopy(H5T_STD_U8BE);
+ else if ( size == 2 && sign == H5T_SGN_NONE)
+ p_type=H5Tcopy(H5T_STD_U16BE);
+ else if ( size == 4 && sign == H5T_SGN_NONE)
+ p_type=H5Tcopy(H5T_STD_U32BE);
+ else if ( size == 8 && sign == H5T_SGN_NONE)
+ p_type=H5Tcopy(H5T_STD_U64BE);
+ }
+ break;
+
+ case H5T_FLOAT:
+ if ( size == 4)
+ p_type=H5Tcopy(H5T_IEEE_F32BE);
+ else if ( size == 8)
+ p_type=H5Tcopy(H5T_IEEE_F64BE);
+ break;
+
+ case H5T_TIME:
+ case H5T_BITFIELD:
+ case H5T_OPAQUE:
+ case H5T_STRING:
+ case H5T_COMPOUND:
+ case H5T_REFERENCE:
+ case H5T_ENUM:
+ case H5T_VLEN:
+ case H5T_ARRAY:
+ break;
+
+ default:
+ break;
+
+ }
+
+ return(p_type);
+} /* end h5str_get_big_endian_type */
+
+/*-------------------------------------------------------------------------
+ * Function: h5str_detect_vlen
+ *
+ * Purpose: Recursive check for any variable length data in given type.
+ *
+ * Return:
+ *        1 : type contains any variable length data
+ *        0 : type doesn't contain any variable length data
+ *        Negative value: error occurred
+ *-------------------------------------------------------------------------
+ */
+static htri_t
+h5str_detect_vlen(hid_t tid)
+{
+ htri_t ret;
+
+ /* recursive detect any vlen data values in type (compound, array ...) */
+ ret = H5Tdetect_class(tid, H5T_VLEN);
+ if((ret == 1) || (ret < 0))
+ goto done;
+
+ /* recursive detect any vlen string in type (compound, array ...) */
+ ret = h5str_detect_vlen_str(tid);
+ if((ret == 1) || (ret < 0))
+ goto done;
+
+done:
+ return ret;
+} /* end h5str_detect_vlen */
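+
+/*
+ * Usage sketch (illustrative only; mtype, sid and buf are placeholder
+ * identifiers): a typical reason to call h5str_detect_vlen is to know
+ * whether a read buffer holds library-allocated variable-length data that
+ * must be reclaimed after use.
+ *
+ *     if (h5str_detect_vlen(mtype) > 0)
+ *         H5Dvlen_reclaim(mtype, sid, H5P_DEFAULT, buf);
+ */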
+
+/*-------------------------------------------------------------------------
+ * Function: render_bin_output
+ *
+ * Purpose: Write the elements of a memory buffer to a binary file stream
+ *
+ * Return: Success: SUCCEED
+ * Failure: FAIL
+ *-------------------------------------------------------------------------
+ */
+static int
+h5str_render_bin_output(FILE *stream, hid_t container, hid_t tid, void *_mem, hsize_t block_nelmts)
+{
+ int ret_value = 0;
+ unsigned char *mem = (unsigned char*)_mem;
+ size_t size; /* datum size */
+ hsize_t block_index;
+ H5T_class_t type_class;
+
+ if((size = H5Tget_size(tid)) > 0) {
+
+ if((type_class = H5Tget_class(tid)) >= 0) {
+
+ switch (type_class) {
+ case H5T_INTEGER:
+ case H5T_FLOAT:
+ case H5T_ENUM:
+ block_index = block_nelmts * size;
+ while(block_index > 0) {
+ size_t bytes_in = 0; /* # of bytes to write */
+ size_t bytes_wrote = 0; /* # of bytes written */
+ size_t item_size = size; /* size of items in bytes */
+
+ if(block_index > sizeof(size_t))
+ bytes_in = sizeof(size_t);
+ else
+ bytes_in = (size_t)block_index;
+
+ bytes_wrote = fwrite(mem, 1, bytes_in, stream);
+
+ if(bytes_wrote != bytes_in || (0 == bytes_wrote && ferror(stream))) {
+ ret_value = -1;
+ break;
+ }
+
+ block_index -= (hsize_t)bytes_wrote;
+ mem = mem + bytes_wrote;
+ }
+ break;
+ case H5T_STRING:
+ {
+ unsigned int i;
+ H5T_str_t pad;
+ char *s;
+ unsigned char tempuchar;
+
+ pad = H5Tget_strpad(tid);
+
+ for (block_index = 0; block_index < block_nelmts; block_index++) {
+ mem = ((unsigned char*)_mem) + block_index * size;
+
+ if (H5Tis_variable_str(tid)) {
+ s = *(char**) mem;
+ if (s != NULL)
+ size = strlen(s);
+ }
+ else {
+ s = (char *) mem;
+ }
+ for (i = 0; i < size && (s[i] || pad != H5T_STR_NULLTERM); i++) {
+ HDmemcpy(&tempuchar, &s[i], sizeof(unsigned char));
+ if (1 != fwrite(&tempuchar, sizeof(unsigned char), 1, stream)) {
+ ret_value = -1;
+ break;
+ }
+ } /* i */
+ if(ret_value < 0)
+ break;
+ } /* for (block_index = 0; block_index < block_nelmts; block_index++) */
+ }
+ break;
+ case H5T_COMPOUND:
+ {
+ unsigned j;
+ hid_t memb;
+ unsigned nmembs;
+ size_t offset;
+
+ nmembs = (unsigned)H5Tget_nmembers(tid);
+
+ for (block_index = 0; block_index < block_nelmts; block_index++) {
+ mem = ((unsigned char*)_mem) + block_index * size;
+ for (j = 0; j < nmembs; j++) {
+ offset = H5Tget_member_offset(tid, j);
+ memb = H5Tget_member_type(tid, j);
+
+ if (h5str_render_bin_output(stream, container, memb, mem + offset, 1) < 0) {
+ H5Tclose(memb);
+ ret_value = -1;
+ break;
+ }
+
+ H5Tclose(memb);
+ }
+ if(ret_value < 0)
+ break;
+ }
+ }
+ break;
+ case H5T_ARRAY:
+ {
+ int k, ndims;
+ hsize_t i, dims[H5S_MAX_RANK], temp_nelmts, nelmts;
+ hid_t memb;
+
+ /* get the array's base datatype for each element */
+ memb = H5Tget_super(tid);
+ ndims = H5Tget_array_ndims(tid);
+ H5Tget_array_dims2(tid, dims);
+
+ /* calculate the number of array elements */
+ for (k = 0, nelmts = 1; k < ndims; k++) {
+ temp_nelmts = nelmts;
+ temp_nelmts *= dims[k];
+ nelmts = (hsize_t) temp_nelmts;
+ }
+
+ for (block_index = 0; block_index < block_nelmts; block_index++) {
+ mem = ((unsigned char*)_mem) + block_index * size;
+ /* dump the array element */
+ if (h5str_render_bin_output(stream, container, memb, mem, nelmts) < 0) {
+ ret_value = -1;
+ break;
+ }
+ }
+ H5Tclose(memb);
+ }
+ break;
+ case H5T_VLEN:
+ {
+ unsigned int i;
+ hsize_t nelmts;
+ hid_t memb;
+
+ /* get the VL sequence's base datatype for each element */
+ memb = H5Tget_super(tid);
+
+ for (block_index = 0; block_index < block_nelmts; block_index++) {
+ mem = ((unsigned char*)_mem) + block_index * size;
+ /* Get the number of sequence elements */
+ nelmts = ((hvl_t *) mem)->len;
+
+ /* dump the array element */
+ if (h5str_render_bin_output(stream, container, memb, ((char *) (((hvl_t *) mem)->p)), nelmts) < 0) {
+ ret_value = -1;
+ break;
+ }
+ }
+ H5Tclose(memb);
+ }
+ break;
+ case H5T_REFERENCE:
+ {
+ if (H5Tequal(tid, H5T_STD_REF_DSETREG)) {
+ /* region data */
+ hid_t region_id, region_space;
+ H5S_sel_type region_type;
+
+ for (block_index = 0; block_index < block_nelmts; block_index++) {
+ mem = ((unsigned char*)_mem) + block_index * size;
+ region_id = H5Rdereference2(container, H5P_DEFAULT, H5R_DATASET_REGION, mem);
+ if (region_id >= 0) {
+ region_space = H5Rget_region(container, H5R_DATASET_REGION, mem);
+ if (region_space >= 0) {
+ region_type = H5Sget_select_type(region_space);
+ if(region_type == H5S_SEL_POINTS)
+ ret_value = render_bin_output_region_points(stream, region_space, region_id, container);
+ else
+ ret_value = render_bin_output_region_blocks(stream, region_space, region_id, container);
+ H5Sclose(region_space);
+ } /* end if (region_space >= 0) */
+ H5Dclose(region_id);
+ } /* end if (region_id >= 0) */
+ if(ret_value < 0)
+ break;
+ }
+ }
+ else if (H5Tequal(tid, H5T_STD_REF_OBJ)) {
+ /* object references are not rendered to binary output */
+ ;
+ }
+ }
+ break;
+ default:
+ for (block_index = 0; block_index < block_nelmts; block_index++) {
+ mem = ((unsigned char*)_mem) + block_index * size;
+ if (size != fwrite(mem, sizeof(char), size, stream)) {
+ ret_value = -1;
+ break;
+ }
+ }
+ break;
+ }
+ } /* end if((type_class = H5Tget_class(tid)) >= 0) */
+ else
+ ret_value = -1;
+ } /* end if((size = H5Tget_size(tid)) > 0) */
+ else
+ ret_value = -1;
+
+ return ret_value;
+} /* end h5str_render_bin_output */
+
+/*-------------------------------------------------------------------------
+ * Purpose: Write the data values from a dataset referenced by region blocks.
+ *
+ * Description:
+ * This is a special case subfunction to write out the data in a region reference of type blocks.
+ *
+ * Return:
+ * The function returns FAIL if there was an error, otherwise SUCCEED
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+render_bin_output_region_data_blocks(FILE *stream, hid_t region_id,
+ hid_t container, int ndims, hid_t type_id, hssize_t nblocks, hsize_t *ptdata)
+{
+ hsize_t *dims1 = NULL;
+ hsize_t *start = NULL;
+ hsize_t *count = NULL;
+ hsize_t numelem;
+ hsize_t total_size[H5S_MAX_RANK];
+ int jndx;
+ size_t type_size;
+ hid_t mem_space = -1;
+ void *region_buf = NULL;
+ int blkndx;
+ hid_t sid1 = -1;
+ int ret_value = SUCCEED;
+
+ /* Get the dataspace of the dataset */
+ if((sid1 = H5Dget_space(region_id)) >= 0) {
+ /* Allocate space for the dimension array */
+ if((dims1 = (hsize_t *)HDmalloc(sizeof(hsize_t) * (size_t)ndims)) != NULL) {
+ /* find the dimensions of each data space from the block coordinates */
+ numelem = 1;
+ for (jndx = 0; jndx < ndims; jndx++) {
+ dims1[jndx] = ptdata[jndx + ndims] - ptdata[jndx] + 1;
+ numelem = dims1[jndx] * numelem;
+ }
+
+ /* Create dataspace for reading buffer */
+ if((mem_space = H5Screate_simple(ndims, dims1, NULL)) >= 0) {
+ if((type_size = H5Tget_size(type_id)) > 0) {
+ if((region_buf = HDmalloc(type_size * (size_t)numelem)) != NULL) {
+ /* Select (x1, x2, ..., xn) x (y1, y2, ..., yn) hyperslab for reading memory dataset */
+ if((start = (hsize_t *)HDmalloc(sizeof(hsize_t) * (size_t)ndims)) != NULL) {
+ if((count = (hsize_t *)HDmalloc(sizeof(hsize_t) * (size_t)ndims)) != NULL) {
+ for (blkndx = 0; blkndx < nblocks; blkndx++) {
+ for (jndx = 0; jndx < ndims; jndx++) {
+ start[jndx] = ptdata[jndx + blkndx * ndims * 2];
+ count[jndx] = dims1[jndx];
+ }
+
+ if(H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL) >= 0) {
+ if(H5Dread(region_id, type_id, mem_space, sid1, H5P_DEFAULT, region_buf) >= 0) {
+ if(H5Sget_simple_extent_dims(mem_space, total_size, NULL) >= 0) {
+ ret_value = h5str_render_bin_output(stream, container, type_id, (char*)region_buf, numelem);
+ } /* end if(H5Sget_simple_extent_dims(mem_space, total_size, NULL) >= 0) */
+ else {
+ ret_value = -1;
+ break;
+ }
+ } /* end if(H5Dread(region_id, type_id, mem_space, sid1, H5P_DEFAULT, region_buf) >= 0) */
+ else {
+ ret_value = -1;
+ break;
+ }
+ } /* end if(H5Sselect_hyperslab(sid1, H5S_SELECT_SET, start, NULL, count, NULL) >= 0) */
+ else {
+ ret_value = -1;
+ break;
+ }
+ /* Render the region data element end */
+ } /* end for (blkndx = 0; blkndx < nblocks; blkndx++) */
+
+ HDfree(count);
+ } /* end if((count = (hsize_t *) HDmalloc(sizeof(hsize_t) * ndims)) != NULL) */
+ else
+ ret_value = -1;
+ HDfree(start);
+ } /* end if((start = (hsize_t *) HDmalloc(sizeof(hsize_t) * ndims)) != NULL) */
+ else
+ ret_value = -1;
+ HDfree(region_buf);
+ } /* end if((region_buf = HDmalloc(type_size * (size_t)numelem)) != NULL) */
+ else
+ ret_value = -1;
+ } /* end if((type_size = H5Tget_size(type_id)) > 0) */
+ else
+ ret_value = -1;
+
+ if(H5Sclose(mem_space) < 0)
+ ret_value = -1;
+ } /* end if((mem_space = H5Screate_simple(ndims, dims1, NULL)) >= 0) */
+ else
+ ret_value = -1;
+ HDfree(dims1);
+ } /* end if((dims1 = (hsize_t *) HDmalloc(sizeof(hsize_t) * ndims)) != NULL) */
+ else
+ ret_value = -1;
+ if(H5Sclose(sid1) < 0)
+ ret_value = -1;
+ } /* end if((sid1 = H5Dget_space(region_id)) >= 0) */
+ else
+ ret_value = -1;
+
+ return ret_value;
+} /* end render_bin_output_region_data_blocks */
+
+/*-------------------------------------------------------------------------
+ * Purpose: Write values from a dataset referenced by region blocks to a binary stream.
+ *
+ * Description:
+ * This is a special case subfunction to dump a region reference using blocks.
+ *
+ * Return:
+ * The function returns FAIL if there was an error, otherwise SUCCEED
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+render_bin_output_region_blocks(FILE *stream, hid_t region_space, hid_t region_id, hid_t container)
+{
+ int ret_value = SUCCEED;
+ hssize_t nblocks;
+ hsize_t alloc_size;
+ hsize_t *ptdata = NULL;
+ int ndims;
+ hid_t dtype;
+ hid_t type_id;
+
+ if((nblocks = H5Sget_select_hyper_nblocks(region_space)) > 0) {
+ /* Print block information */
+ if((ndims = H5Sget_simple_extent_ndims(region_space)) >= 0) {
+ alloc_size = (hsize_t)nblocks * (hsize_t)ndims * 2 * (hsize_t)sizeof(ptdata[0]);
+ if((ptdata = (hsize_t*)HDmalloc((size_t)alloc_size)) != NULL) {
+ if(H5Sget_select_hyper_blocklist(region_space, (hsize_t)0, (hsize_t)nblocks, ptdata) >= 0) {
+ if((dtype = H5Dget_type(region_id)) >= 0) {
+ if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) {
+ ret_value = render_bin_output_region_data_blocks(stream, region_id, container, ndims,
+ type_id, nblocks, ptdata);
+
+ if(H5Tclose(type_id) < 0)
+ ret_value = -1;
+ } /* end if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) */
+ else
+ ret_value = -1;
+
+ if(H5Tclose(dtype) < 0)
+ ret_value = -1;
+ } /* end if((dtype = H5Dget_type(region_id)) >= 0) */
+ else
+ ret_value = -1;
+ } /* end if(H5Sget_select_hyper_blocklist(region_space, (hsize_t) 0, (hsize_t) nblocks, ptdata) >= 0) */
+ else
+ ret_value = -1;
+
+ HDfree(ptdata);
+ } /* end if((ptdata = (hsize_t*) HDmalloc((size_t) alloc_size)) != NULL) */
+ else
+ ret_value = -1;
+ } /* end if((ndims = H5Sget_simple_extent_ndims(region_space)) >= 0) */
+ else
+ ret_value = -1;
+ } /* end if((nblocks = H5Sget_select_hyper_nblocks(region_space)) > 0) */
+ else
+ ret_value = -1;
+
+ return ret_value;
+} /* end render_bin_output_region_blocks */
+
+/*-------------------------------------------------------------------------
+ * Purpose: Write the data values from a dataset referenced by region points.
+ *
+ * Description:
+ * This is a special case subfunction to write out the data in a region reference of type points.
+ *
+ * Return:
+ * The function returns FAIL on error, otherwise SUCCEED
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+render_bin_output_region_data_points(FILE *stream, hid_t region_space, hid_t region_id,
+ hid_t container, int ndims, hid_t type_id, hssize_t npoints, hsize_t *ptdata)
+{
+ hsize_t *dims1 = NULL;
+ size_t type_size;
+ hid_t mem_space = -1;
+ void *region_buf = NULL;
+ int ret_value = SUCCEED;
+
+ if((type_size = H5Tget_size(type_id)) > 0) {
+ if((region_buf = HDmalloc(type_size * (size_t)npoints)) != NULL) {
+ /* Allocate space for the dimension array */
+ if((dims1 = (hsize_t *)HDmalloc(sizeof(hsize_t) * (size_t)ndims)) != NULL) {
+ dims1[0] = (hsize_t)npoints;
+ if((mem_space = H5Screate_simple(1, dims1, NULL)) >= 0) {
+ if(H5Dread(region_id, type_id, mem_space, region_space, H5P_DEFAULT, region_buf) >= 0) {
+ if(H5Sget_simple_extent_dims(region_space, dims1, NULL) >= 0) {
+ ret_value = h5str_render_bin_output(stream, container, type_id, (char*)region_buf, (hsize_t)npoints);
+ } /* end if(H5Sget_simple_extent_dims(region_space, dims1, NULL) >= 0) */
+ else
+ ret_value = -1;
+ } /* end if(H5Dread(region_id, type_id, mem_space, region_space, H5P_DEFAULT, region_buf) >= 0) */
+ else
+ ret_value = -1;
+ } /* end if((mem_space = H5Screate_simple(1, dims1, NULL)) >= 0) */
+ else
+ ret_value = -1;
+
+ HDfree(dims1);
+ } /* end if((dims1 = (hsize_t *) malloc(sizeof(hsize_t) * ndims)) != NULL) */
+ else
+ ret_value = -1;
+ HDfree(region_buf);
+ } /* end if((region_buf = malloc(type_size * (size_t)npoints)) != NULL) */
+ else
+ ret_value = -1;
+
+ if(H5Sclose(mem_space) < 0)
+ ret_value = -1;
+ } /* end if((type_size = H5Tget_size(type_id)) > 0) */
+ else
+ ret_value = -1;
+
+ return ret_value;
+} /* end render_bin_output_region_data_points */
+
+/*-------------------------------------------------------------------------
+ * Purpose: Write values from a dataset referenced by region points to a binary stream.
+ *
+ * Description:
+ * This is a special case subfunction to dump a region reference using points.
+ *
+ * Return:
+ * The function returns FAIL if there was an error, otherwise SUCCEED
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+render_bin_output_region_points(FILE *stream, hid_t region_space, hid_t region_id, hid_t container)
+{
+ int ret_value = SUCCEED;
+ hssize_t npoints;
+ hsize_t alloc_size;
+ hsize_t *ptdata;
+ int ndims;
+ hid_t dtype;
+ hid_t type_id;
+
+ if((npoints = H5Sget_select_elem_npoints(region_space)) > 0) {
+ /* Allocate space for the dimension array */
+ if((ndims = H5Sget_simple_extent_ndims(region_space)) >= 0) {
+ alloc_size = (hsize_t)npoints * (hsize_t)ndims * (hsize_t)sizeof(ptdata[0]);
+ if(NULL != (ptdata = (hsize_t *)HDmalloc((size_t)alloc_size))) {
+ if(H5Sget_select_elem_pointlist(region_space, (hsize_t)0, (hsize_t)npoints, ptdata) >= 0) {
+ if((dtype = H5Dget_type(region_id)) >= 0) {
+ if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) {
+ ret_value = render_bin_output_region_data_points(stream, region_space, region_id,
+ container, ndims, type_id, npoints, ptdata);
+
+ if(H5Tclose(type_id) < 0)
+ ret_value = -1;
+ } /* end if((type_id = H5Tget_native_type(dtype, H5T_DIR_DEFAULT)) >= 0) */
+ else
+ ret_value = -1;
+
+ if(H5Tclose(dtype) < 0)
+ ret_value = -1;
+ } /* end if((dtype = H5Dget_type(region_id)) >= 0) */
+ else
+ ret_value = -1;
+ } /* end if(H5Sget_select_elem_pointlist(region_space, (hsize_t) 0, (hsize_t) npoints, ptdata) >= 0) */
+ else
+ ret_value = -1;
+
+ HDfree(ptdata);
+ } /* end if(NULL != (ptdata = (hsize_t *)HDmalloc((size_t) alloc_size))) */
+ else
+ ret_value = -1;
+ } /* end if((ndims = H5Sget_simple_extent_ndims(region_space)) >= 0) */
+ else
+ ret_value = -1;
+
+ } /* end if((npoints = H5Sget_select_elem_npoints(region_space)) > 0) */
+ else
+ ret_value = -1;
+
+ return ret_value;
+} /* end render_bin_output_region_points */
+
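+/*-------------------------------------------------------------------------
+ * Function: h5str_dump_simple_dset
+ *
+ * Purpose: Dump the data of a simple dataset to a stream. The binary_order
+ * argument selects the memory datatype used for the read (1 = native,
+ * 2 = little-endian, 3 = big-endian, otherwise a copy of the file
+ * datatype); output is written as text when binary_order is 99 and as
+ * raw binary otherwise.
+ *
+ * Return: Success: SUCCEED
+ * Failure: FAIL
+ *-------------------------------------------------------------------------
+ */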
+int
+h5str_dump_simple_dset(FILE *stream, hid_t dset, int binary_order)
+{
+ int ret_value = SUCCEED;
+ hid_t f_space = -1; /* file data space */
+ hsize_t elmtno; /* counter */
+ size_t i; /* counter */
+ int ndims;
+ int carry; /* counter carry value */
+ hsize_t zero[8]; /* vector of zeros */
+ hsize_t total_size[H5S_MAX_RANK]; /* total size of dataset*/
+
+ /* Print info */
+ size_t p_type_nbytes; /* size of memory type */
+ hsize_t p_nelmts; /* total selected elmts */
+
+ /* Stripmine info */
+ hsize_t sm_size[H5S_MAX_RANK]; /* stripmine size */
+ hsize_t sm_nbytes; /* bytes per stripmine */
+ hsize_t sm_nelmts; /* elements per stripmine*/
+ unsigned char *sm_buf = NULL; /* buffer for raw data */
+ hid_t sm_space = -1; /* stripmine data space */
+
+ /* Hyperslab info */
+ hsize_t hs_offset[H5S_MAX_RANK]; /* starting offset */
+ hsize_t hs_size[H5S_MAX_RANK]; /* size this pass */
+ hsize_t hs_nelmts; /* elements in request */
+
+ /* VL data special information */
+ unsigned int vl_data = 0; /* contains VL datatypes */
+ hid_t p_type = -1;
+ hid_t f_type = -1;
+
+ if(dset < 0) return -1;
+ f_type = H5Dget_type(dset);
+ if (binary_order == 1)
+ p_type = h5str_get_native_type(f_type);
+ else if (binary_order == 2)
+ p_type = h5str_get_little_endian_type(f_type);
+ else if (binary_order == 3)
+ p_type = h5str_get_big_endian_type(f_type);
+ else
+ p_type = H5Tcopy(f_type);
+
+ H5Tclose(f_type);
+
+ if (p_type >= 0) {
+ if((f_space = H5Dget_space(dset)) >= 0) {
+ ndims = H5Sget_simple_extent_ndims(f_space);
+
+ if ((size_t)ndims <= (sizeof(sm_size)/sizeof(sm_size[0]))) {
+ H5Sget_simple_extent_dims(f_space, total_size, NULL);
+
+ /* calculate the number of elements we're going to print */
+ p_nelmts = 1;
+
+ if (ndims > 0) {
+ for (i = 0; i < (size_t)ndims; i++)
+ p_nelmts *= total_size[i];
+ } /* end if */
+
+ if (p_nelmts > 0) {
+ /* Check if we have VL data in the dataset's datatype */
+ if (h5str_detect_vlen(p_type) != 0)
+ vl_data = 1;
+
+ /*
+ * Determine the strip mine size and allocate a buffer. The strip mine is
+ * a hyperslab whose size is manageable.
+ */
+ sm_nbytes = p_type_nbytes = H5Tget_size(p_type);
+
+ if (ndims > 0) {
+ for (i = (size_t)ndims; i > 0; --i) {
+ hsize_t size = H5TOOLS_BUFSIZE / sm_nbytes;
+ if ( size == 0) /* datum size > H5TOOLS_BUFSIZE */
+ size = 1;
+ sm_size[i - 1] = (((total_size[i - 1]) < (size)) ? (total_size[i - 1]) : (size));
+ sm_nbytes *= sm_size[i - 1];
+ }
+ }
+
+ if(sm_nbytes > 0) {
+ sm_buf = (unsigned char *)HDmalloc((size_t)sm_nbytes);
+
+ sm_nelmts = sm_nbytes / p_type_nbytes;
+ sm_space = H5Screate_simple(1, &sm_nelmts, NULL);
+
+ /* The stripmine loop */
+ HDmemset(hs_offset, 0, sizeof hs_offset);
+ HDmemset(zero, 0, sizeof zero);
+
+ for (elmtno = 0; elmtno < p_nelmts; elmtno += hs_nelmts) {
+ /* Calculate the hyperslab size */
+ if (ndims > 0) {
+ for (i = 0, hs_nelmts = 1; i < (size_t)ndims; i++) {
+ hs_size[i] = (((total_size[i] - hs_offset[i]) < (sm_size[i])) ? (total_size[i] - hs_offset[i]) : (sm_size[i]));
+ hs_nelmts *= hs_size[i];
+ }
+
+ H5Sselect_hyperslab(f_space, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL);
+ H5Sselect_hyperslab(sm_space, H5S_SELECT_SET, zero, NULL, &hs_nelmts, NULL);
+ }
+ else {
+ H5Sselect_all(f_space);
+ H5Sselect_all(sm_space);
+ hs_nelmts = 1;
+ }
+
+ /* Read the data */
+ if (H5Dread(dset, p_type, sm_space, f_space, H5P_DEFAULT, sm_buf) >= 0) {
+
+ if (binary_order == 99)
+ ret_value = h5tools_dump_simple_data(stream, dset, p_type, sm_buf, hs_nelmts);
+ else
+ ret_value = h5str_render_bin_output(stream, dset, p_type, sm_buf, hs_nelmts);
+
+ /* Reclaim any VL memory, if necessary */
+ if (vl_data)
+ H5Dvlen_reclaim(p_type, sm_space, H5P_DEFAULT, sm_buf);
+ }
+ else {
+ ret_value = -1;
+ break;
+ }
+
+ if(ret_value < 0) break;
+
+ /* Calculate the next hyperslab offset */
+ for (i = (size_t)ndims, carry = 1; i > 0 && carry; --i) {
+ hs_offset[i - 1] += hs_size[i - 1];
+
+ if (hs_offset[i - 1] == total_size[i - 1])
+ hs_offset[i - 1] = 0;
+ else
+ carry = 0;
+ }
+ }
+
+ if(sm_buf)
+ HDfree(sm_buf);
+ }
+ if(sm_space >= 0 && H5Sclose(sm_space) < 0)
+ ret_value = -1;
+ }
+ }
+ if(f_space >= 0 && H5Sclose(f_space) < 0)
+ ret_value = -1;
+ } /* end if((f_space = H5Dget_space(dset)) >= 0) */
+ else
+ ret_value = -1;
+
+ if (p_type >= 0)
+ H5Tclose(p_type);
+ }
+ return ret_value;
+} /* end h5str_dump_simple_dset */
+
+static int
+h5tools_dump_simple_data(FILE *stream, hid_t container, hid_t type, void *_mem, hsize_t nelmts)
+{
+ int ret_value = 0;
+ int line_count;
+ unsigned char *mem = (unsigned char*)_mem;
+ size_t size; /* datum size */
+ hsize_t i; /*element counter */
+ h5str_t buffer; /*string into which to render */
+
+ if((size = H5Tget_size(type)) > 0) {
+ for (i = 0, line_count = 0; i < nelmts; i++, line_count++) {
+ void* memref = mem + i * size;
+
+ /* Render the data element*/
+ h5str_new(&buffer, 32 * size);
+ h5str_sprintf(&buffer, container, type, memref, 1);
+ if(i > 0) {
+ HDfprintf(stream, ", ");
+ if (line_count >= H5TOOLS_TEXT_BLOCK) {
+ line_count = 0;
+ HDfprintf(stream, "\n");
+ }
+ }
+ HDfprintf(stream, "%s", buffer.s);
+ h5str_free(&buffer);
+ } /* end for (i = 0; i < nelmts... */
+ HDfprintf(stream, "\n");
+ } /* end if((size = H5Tget_size(tid)) > 0) */
+ else
+ ret_value = -1;
+
+ return ret_value;
+} /* end h5tools_dump_simple_data */
+
+/*
+ * Utility Java APIs
+ * Functions designed to workaround issues with the Java-C interface
+ */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5AwriteVL
+ * Signature: (JJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5AwriteVL(JNIEnv *env, jclass clss, jlong attr_id, jlong mem_type_id, jobjectArray buf)
+{
+ herr_t status = -1;
+ char **wdata;
+ jsize size;
+ jint i;
+
+ size = ENVPTR->GetArrayLength(ENVPAR (jarray) buf);
+
+ wdata = (char**)HDcalloc((size_t)size + 1, sizeof(char*));
+ if (!wdata) {
+ h5JNIFatalError(env, "H5AwriteVL: cannot allocate buffer");
+ } /* end if */
+ else {
+ HDmemset(wdata, 0, (size_t)size * sizeof(char*));
+ for (i = 0; i < size; ++i) {
+ jstring obj = (jstring) ENVPTR->GetObjectArrayElement(ENVPAR (jobjectArray) buf, i);
+ if (obj != 0) {
+ jsize length = ENVPTR->GetStringUTFLength(ENVPAR obj);
+ const char *utf8 = ENVPTR->GetStringUTFChars(ENVPAR obj, 0);
+
+ if (utf8) {
+ wdata[i] = (char*)HDmalloc((size_t)length + 1);
+ if (wdata[i]) {
+ HDmemset(wdata[i], 0, ((size_t)length + 1));
+ HDstrncpy(wdata[i], utf8, (size_t)length);
+ } /* end if */
+ } /* end if */
+
+ ENVPTR->ReleaseStringUTFChars(ENVPAR obj, utf8);
+ ENVPTR->DeleteLocalRef(ENVPAR obj);
+ } /* end if */
+ } /* end for (i = 0; i < size; ++i) */
+
+ status = H5Awrite((hid_t)attr_id, (hid_t)mem_type_id, wdata);
+
+ for (i = 0; i < size; i++) {
+ if(wdata[i]) {
+ HDfree(wdata[i]);
+ } /* end if */
+ } /* end for */
+ HDfree(wdata);
+
+ if (status < 0)
+ h5libraryError(env);
+ } /* end else */
+
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5AwriteVL */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5AreadVL
+ * Signature: (JJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5AreadVL(JNIEnv *env, jclass clss, jlong attr_id, jlong mem_type_id, jobjectArray buf)
+{
+ herr_t status = -1;
+ jstring jstr;
+ char **strs;
+ int i, n;
+ hid_t sid;
+ hsize_t dims[H5S_MAX_RANK];
+
+ n = ENVPTR->GetArrayLength(ENVPAR buf);
+
+ strs =(char **)HDmalloc((size_t)n * sizeof(char *));
+ if (strs == NULL) {
+ h5JNIFatalError( env, "H5AreadVL: failed to allocate buff for read variable length strings");
+ } /* end if */
+ else {
+ status = H5Aread(attr_id, mem_type_id, strs);
+ if (status < 0) {
+ dims[0] = (hsize_t)n;
+ sid = H5Screate_simple(1, dims, NULL);
+ H5Dvlen_reclaim(mem_type_id, sid, H5P_DEFAULT, strs);
+ H5Sclose(sid);
+ HDfree(strs);
+ h5JNIFatalError(env, "H5AreadVL: failed to read variable length strings");
+ } /* end if */
+ else {
+ for (i=0; i<n; i++) {
+ jstr = ENVPTR->NewStringUTF(ENVPAR strs[i]);
+ ENVPTR->SetObjectArrayElement(ENVPAR buf, i, jstr);
+ HDfree (strs[i]);
+ } /* end for */
+
+ /*
+ When repeatedly reading an attribute with a large number of strings (e.g., 1,000,000),
+ H5Dvlen_reclaim() may crash on Windows because the Java GC will not be able to collect
+ free space in time. Instead, HDfree(strs[i]) is used above to free each individual
+ string once it has been copied.
+ H5Dvlen_reclaim(tid, sid, H5P_DEFAULT, strs);
+ */
+
+ HDfree(strs);
+ } /* end else */
+ } /* end else */
+ return (jint)status;
+} /* end Java_hdf_hdf5lib_H5_H5AreadVL */
+
+/*
+ * Copies the content of one attribute to another attribute
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Acopy
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Acopy(JNIEnv *env, jclass clss, jlong src_id, jlong dst_id)
+{
+ jbyte *buf;
+ herr_t retVal = -1;
+ hid_t src_did = (hid_t)src_id;
+ hid_t dst_did = (hid_t)dst_id;
+ hid_t tid = -1;
+ hid_t sid = -1;
+ hsize_t total_size = 0;
+
+
+ sid = H5Aget_space(src_did);
+ if (sid < 0) {
+ h5libraryError(env);
+ } /* end if */
+ else {
+ tid = H5Aget_type(src_did);
+ if (tid < 0) {
+ H5Sclose(sid);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ total_size = (hsize_t)H5Sget_simple_extent_npoints(sid) * (hsize_t)H5Tget_size(tid);
+
+ H5Sclose(sid);
+
+ buf = (jbyte *)HDmalloc( (size_t)total_size * sizeof(jbyte));
+ if (buf == NULL) {
+ H5Tclose(tid);
+ h5outOfMemory( env, "H5Acopy: malloc failed");
+ } /* end if */
+ else {
+ retVal = H5Aread(src_did, tid, buf);
+ H5Tclose(tid);
+
+ if (retVal < 0) {
+ HDfree(buf);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ tid = H5Aget_type(dst_did);
+ if (tid < 0) {
+ HDfree(buf);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ retVal = H5Awrite(dst_did, tid, buf);
+
+ H5Tclose(tid);
+ HDfree(buf);
+
+ if (retVal < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Acopy */
+
+/*
+ * Copies the content of one dataset to another dataset
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dcopy
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Dcopy(JNIEnv *env, jclass clss, jlong src_id, jlong dst_id)
+{
+ jbyte *buf;
+ herr_t retVal = -1;
+ hid_t src_did = (hid_t)src_id;
+ hid_t dst_did = (hid_t)dst_id;
+ hid_t tid = -1;
+ hid_t sid = -1;
+ hsize_t total_size = 0, total_allocated_size;
+
+ total_allocated_size = H5Dget_storage_size(src_did);
+ if (total_allocated_size == 0)
+ return 0; /* nothing to write */
+
+ sid = H5Dget_space(src_did);
+ if (sid < 0) {
+ h5libraryError(env);
+ return -1;
+ } /* end if */
+
+ tid = H5Dget_type(src_did);
+ if (tid < 0) {
+ H5Sclose(sid);
+ h5libraryError(env);
+ return -1;
+ } /* end if */
+
+ total_size = (hsize_t)H5Sget_simple_extent_npoints(sid) * (hsize_t)H5Tget_size(tid);
+
+ H5Sclose(sid);
+
+ buf = (jbyte*)HDmalloc((size_t)total_size * sizeof(jbyte));
+ if (buf == NULL) {
+ H5Tclose(tid);
+ h5outOfMemory(env, "H5Dcopy: malloc failed");
+ return -1;
+ } /* end if */
+
+ retVal = H5Dread(src_did, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);
+ H5Tclose(tid);
+
+ if (retVal < 0) {
+ HDfree(buf);
+ h5libraryError(env);
+ return (jint)retVal;
+ } /* end if */
+
+ tid = H5Dget_type(dst_did);
+ if (tid < 0) {
+ HDfree(buf);
+ h5libraryError(env);
+ return -1;
+ } /* end if */
+ retVal = H5Dwrite(dst_did, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);
+ H5Tclose(tid);
+ HDfree(buf);
+
+ if (retVal < 0) {
+ h5libraryError(env);
+ } /* end if */
+
+ return (jint)retVal;
+} /* end Java_hdf_hdf5lib_H5_H5Dcopy */
+
+/*
+/////////////////////////////////////////////////////////////////////////////////
+//
+//
+// Add these methods so that we don't need to call H5Gget_objtype_by_idx
+// in a loop to get information for all the objects in a group, which can
+// take a long time to finish if the number of objects is more than 10,000.
+//
+/////////////////////////////////////////////////////////////////////////////////
+*/
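+
+/*
+ * Condensed sketch of the call made from the JNI wrapper below (hypothetical
+ * fixed-size arrays, assuming a group identifier "gid" holding at most 64
+ * objects); a single call fills all of the per-object arrays in one
+ * H5Literate() pass:
+ *
+ *     char *names[64];
+ *     int otypes[64], ltypes[64];
+ *     unsigned long fnos[64], refs[64];
+ *     int count;
+ *
+ *     count = H5Gget_obj_info_full(gid, names, otypes, ltypes, fnos, refs,
+ *                                  H5_INDEX_NAME, H5_ITER_INC);
+ */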
+
+#ifdef __cplusplus
+ herr_t obj_info_all(hid_t g_id, const char *name, const H5L_info_t *linfo, void *op_data);
+ herr_t obj_info_max(hid_t g_id, const char *name, const H5L_info_t *linfo, void *op_data);
+ int H5Gget_obj_info_max(hid_t, char **, int *, int *, unsigned long *, long);
+ int H5Gget_obj_info_full( hid_t loc_id, char **objname, int *otype, int *ltype, unsigned long *fno, unsigned long *objno, int indexType, int indexOrder);
+#else
+ static herr_t obj_info_all(hid_t g_id, const char *name, const H5L_info_t *linfo, void *op_data);
+ static herr_t obj_info_max(hid_t g_id, const char *name, const H5L_info_t *linfo, void *op_data);
+ static int H5Gget_obj_info_max(hid_t, char **, int *, int *, unsigned long *, long);
+ static int H5Gget_obj_info_full( hid_t loc_id, char **objname, int *otype, int *ltype, unsigned long *fno, unsigned long *objno, int indexType, int indexOrder);
+#endif
+
+typedef struct info_all
+{
+ char **objname;
+ int *otype;
+ int *ltype;
+ unsigned long *objno;
+ unsigned long *fno;
+ unsigned long idxnum;
+ int count;
+} info_all_t;
+
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Gget_obj_info_full
+ * Signature: (JLjava/lang/String;[Ljava/lang/String;[I[I[J[JIII)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Gget_1obj_1info_1full(JNIEnv *env, jclass clss, jlong loc_id, jstring group_name,
+ jobjectArray objName, jintArray oType, jintArray lType, jlongArray fNo,
+ jlongArray oRef, jint n, jint indx_type, jint indx_order)
+{
+ herr_t ret_val = -1;
+ const char *gName = NULL;
+ char **oName = NULL;
+ jboolean isCopy;
+ jstring str;
+ jint *otarr;
+ jint *ltarr;
+ jlong *refP;
+ jlong *fnoP;
+ unsigned long *refs=NULL;
+ unsigned long *fnos=NULL;
+ hid_t gid = (hid_t)loc_id;
+ int i;
+ int indexType = indx_type;
+ int indexOrder = indx_order;
+
+ if (oType == NULL) {
+ h5nullArgument(env, "H5Gget_obj_info_full: oType is NULL");
+ }
+ else if (lType == NULL) {
+ h5nullArgument(env, "H5Gget_obj_info_full: lType is NULL");
+ }
+ else if (oRef == NULL) {
+ h5nullArgument(env, "H5Gget_obj_info_full: oRef is NULL");
+ }
+ else if (fNo == NULL) {
+ h5nullArgument(env, "H5Gget_obj_info_full: fNo is NULL");
+ }
+ else {
+ otarr = ENVPTR->GetIntArrayElements(ENVPAR oType, &isCopy);
+ if (otarr == NULL) {
+ h5JNIFatalError(env, "H5Gget_obj_info_full: otype not pinned");
+ return -1;
+ } /* end if */
+ ltarr = ENVPTR->GetIntArrayElements(ENVPAR lType, &isCopy);
+ if (ltarr == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR oType, otarr, JNI_ABORT);
+ h5JNIFatalError(env, "H5Gget_obj_info_full: ltype not pinned");
+ return -1;
+ } /* end if */
+ refP = ENVPTR->GetLongArrayElements(ENVPAR oRef, &isCopy);
+ if (refP == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR lType, ltarr, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR oType, otarr, JNI_ABORT);
+ h5JNIFatalError(env, "H5Gget_obj_info_full: oRef not pinned");
+ return -1;
+ } /* end if */
+ fnoP = ENVPTR->GetLongArrayElements(ENVPAR fNo, &isCopy);
+ if (fnoP == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR lType, ltarr, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR oType, otarr, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR oRef, refP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Gget_obj_info_full: fNo not pinned");
+ return -1;
+ } /* end if */
+ oName = (char **)HDcalloc((size_t)n, sizeof(*oName));
+ if (!oName) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR lType, ltarr, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR oType, otarr, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR oRef, refP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR fNo, fnoP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Gget_obj_info_full: oName not allocated");
+ return -1;
+ } /* end if */
+
+ refs = (unsigned long *)HDcalloc((size_t)n, sizeof(unsigned long));
+ fnos = (unsigned long *)HDcalloc((size_t)n, sizeof(unsigned long));
+ if (!refs || !fnos) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR lType, ltarr, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR oType, otarr, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR oRef, refP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR fNo, fnoP, JNI_ABORT);
+ h5str_array_free(oName, (size_t)n);
+ if (refs)
+ HDfree(refs);
+ if (fnos)
+ HDfree(fnos);
+ h5JNIFatalError(env, "H5Gget_obj_info_full: result arrays not allocated");
+ return -1;
+ } /* end if */
+
+ if (group_name != NULL) {
+ gid = -1;
+ gName = ENVPTR->GetStringUTFChars(ENVPAR group_name, &isCopy);
+ if (gName != NULL) {
+ gid = H5Gopen2((hid_t)loc_id, gName, H5P_DEFAULT);
+
+ ENVPTR->ReleaseStringUTFChars(ENVPAR group_name, gName);
+ } /* end if */
+ if(gid < 0) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR lType, ltarr, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR oType, otarr, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR oRef, refP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR fNo, fnoP, JNI_ABORT);
+ h5str_array_free(oName, (size_t)n);
+ HDfree(refs);
+ HDfree(fnos);
+ h5JNIFatalError(env, "H5Gget_obj_info_full: could not get group identifier");
+ return -1;
+ } /* end if */
+ } /* end if */
+
+ ret_val = H5Gget_obj_info_full(gid, oName, (int *)otarr, (int *)ltarr, fnos, refs, indexType, indexOrder);
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR lType, ltarr, 0);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR oType, otarr, 0);
+
+ if (group_name != NULL)
+ H5Gclose(gid);
+
+ if (ret_val < 0) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR oRef, refP, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR fNo, fnoP, JNI_ABORT);
+ h5str_array_free(oName, (size_t)n);
+ HDfree(refs);
+ HDfree(fnos);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ for (i=0; i<n; i++) {
+ refP[i] = (jlong)refs[i];
+ } /* end for */
+ HDfree(refs);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR oRef, refP, 0);
+
+ for (i=0; i<n; i++) {
+ fnoP[i] = (jlong)fnos[i];
+ } /* end for */
+ HDfree(fnos);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR fNo, fnoP, 0);
+
+ for (i=0; i<n; i++) {
+ if (*(oName+i)) {
+ str = ENVPTR->NewStringUTF(ENVPAR *(oName+i));
+ ENVPTR->SetObjectArrayElement(ENVPAR objName, i, (jobject)str);
+ } /* end if */
+ } /* for (i=0; i<n; i++)*/
+ h5str_array_free(oName, (size_t)n);
+ } /* end else */
+ } /* end else */
+ return ret_val;
+} /* end Java_hdf_hdf5lib_H5_H5Gget_1obj_1info_1full */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Gget_obj_info_max
+ * Signature: (J[Ljava/lang/String;[I[I[JJI)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Gget_1obj_1info_1max(JNIEnv *env, jclass clss, jlong loc_id, jobjectArray objName,
+ jintArray oType, jintArray lType, jlongArray oRef,
+ jlong maxnum, jint n)
+{
+ herr_t ret_val = -1;
+ char **oName=NULL;
+ jboolean isCopy;
+ jstring str;
+ jint *otarr;
+ jint *ltarr;
+ jlong *refP;
+ unsigned long *refs;
+ int i;
+
+ if (oType == NULL) {
+ h5nullArgument(env, "H5Gget_obj_info_max: oType is NULL");
+ } /* end if */
+ else if (lType == NULL) {
+ h5nullArgument(env, "H5Gget_obj_info_max: lType is NULL");
+ } /* end else if */
+ else if (oRef == NULL) {
+ h5nullArgument(env, "H5Gget_obj_info_max: oRef is NULL");
+ } /* end else if */
+ else {
+ otarr = ENVPTR->GetIntArrayElements(ENVPAR oType, &isCopy);
+ if (otarr == NULL) {
+ h5JNIFatalError(env, "H5Gget_obj_info_max: otype not pinned");
+ return -1;
+ } /* end if */
+
+ ltarr = ENVPTR->GetIntArrayElements(ENVPAR lType, &isCopy);
+ if (ltarr == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR oType, otarr, JNI_ABORT);
+ h5JNIFatalError(env, "H5Gget_obj_info_max: ltype not pinned");
+ return -1;
+ } /* end if */
+
+ refP = ENVPTR->GetLongArrayElements(ENVPAR oRef, &isCopy);
+ if (refP == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR oType, otarr, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR lType, ltarr, JNI_ABORT);
+ h5JNIFatalError(env, "H5Gget_obj_info_max: oRef not pinned");
+ return -1;
+ } /* end if */
+
+ oName = (char **)HDcalloc((size_t)n, sizeof(*oName));
+ if (!oName) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR lType, ltarr, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR oType, otarr, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR oRef, refP, JNI_ABORT);
+ h5JNIFatalError(env, "H5Gget_obj_info_max: oName not allocated");
+ return -1;
+ } /* end if */
+ refs = (unsigned long *)HDcalloc((size_t)n, sizeof(unsigned long));
+ if (!refs) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR lType, ltarr, JNI_ABORT);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR oType, otarr, JNI_ABORT);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR oRef, refP, JNI_ABORT);
+ h5str_array_free(oName, (size_t)n);
+ h5JNIFatalError(env, "H5Gget_obj_info_max: result array not allocated");
+ return -1;
+ } /* end if */
+
+ ret_val = H5Gget_obj_info_max((hid_t)loc_id, oName, (int*)otarr, (int*)ltarr, refs, maxnum );
+ ENVPTR->ReleaseIntArrayElements(ENVPAR lType, ltarr, 0);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR oType, otarr, 0);
+
+ if (ret_val < 0) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR oRef, refP, JNI_ABORT);
+ h5str_array_free(oName, (size_t)n);
+ HDfree(refs);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ for (i=0; i<n; i++) {
+ refP[i] = (jlong) refs[i];
+ } /* end for */
+ HDfree(refs);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR oRef, refP, 0);
+
+ for (i=0; i<n; i++) {
+ if (*(oName+i)) {
+ str = ENVPTR->NewStringUTF(ENVPAR *(oName+i));
+ ENVPTR->SetObjectArrayElement(ENVPAR objName, i, (jobject)str);
+ }
+ } /* for (i=0; i<n; i++)*/
+
+ h5str_array_free(oName, (size_t)n);
+ } /* end else */
+ } /* end else */
+
+ return ret_val;
+} /* end Java_hdf_hdf5lib_H5_H5Gget_1obj_1info_1max */
+
+int
+H5Gget_obj_info_full(hid_t loc_id, char **objname, int *otype, int *ltype, unsigned long *fno, unsigned long *objno, int indexType, int indexOrder)
+{
+ info_all_t info;
+ info.objname = objname;
+ info.otype = otype;
+ info.ltype = ltype;
+ info.idxnum = 0;
+ info.fno = fno;
+ info.objno = objno;
+ info.count = 0;
+
+ if(H5Literate(loc_id, (H5_index_t)indexType, (H5_iter_order_t)indexOrder, NULL, obj_info_all, (void *)&info) < 0) {
+ /* iterate failed, try normal alphabetical order */
+ if(H5Literate(loc_id, H5_INDEX_NAME, H5_ITER_INC, NULL, obj_info_all, (void *)&info) < 0)
+ return -1;
+ } /* end if */
+
+ return info.count;
+} /* end H5Gget_obj_info_full */
+
+int
+H5Gget_obj_info_max(hid_t loc_id, char **objname, int *otype, int *ltype, unsigned long *objno, long maxnum)
+{
+ info_all_t info;
+ info.objname = objname;
+ info.otype = otype;
+ info.ltype = ltype;
+ info.idxnum = (unsigned long)maxnum;
+ info.objno = objno;
+ info.count = 0;
+
+ if(H5Lvisit(loc_id, H5_INDEX_NAME, H5_ITER_NATIVE, obj_info_max, (void *)&info) < 0)
+ return -1;
+
+ return info.count;
+} /* end H5Gget_obj_info_max */
+
+herr_t
+obj_info_all(hid_t loc_id, const char *name, const H5L_info_t *info, void *op_data)
+{
+ herr_t retVal = -1;
+ info_all_t *datainfo = (info_all_t*)op_data;
+ H5O_info_t object_info;
+
+ retVal = H5Oget_info_by_name(loc_id, name, &object_info, H5P_DEFAULT);
+
+ if (retVal < 0) {
+ *(datainfo->otype+datainfo->count) = -1;
+ *(datainfo->ltype+datainfo->count) = -1;
+ *(datainfo->objname+datainfo->count) = (char *)HDmalloc(HDstrlen(name)+1);
+ HDstrcpy(*(datainfo->objname+datainfo->count), name);
+ *(datainfo->objno+datainfo->count) = (unsigned long)-1;
+ } /* end if */
+ else {
+ *(datainfo->otype+datainfo->count) = object_info.type;
+ *(datainfo->ltype+datainfo->count) = info->type;
+ *(datainfo->objname+datainfo->count) = (char *)HDmalloc(HDstrlen(name)+1);
+ HDstrcpy(*(datainfo->objname+datainfo->count), name);
+
+ *(datainfo->fno+datainfo->count) = object_info.fileno;
+ *(datainfo->objno+datainfo->count) = (unsigned long)object_info.addr;
+ /*
+ if(info->type==H5L_TYPE_HARD)
+ *(datainfo->objno+datainfo->count) = (unsigned long)info->u.address;
+ else
+ *(datainfo->objno+datainfo->count) = info->u.val_size;
+ */
+ } /* end else */
+
+ datainfo->count++;
+
+ return 0;
+} /* end obj_info_all */
+
+herr_t
+obj_info_max(hid_t loc_id, const char *name, const H5L_info_t *info, void *op_data)
+{
+ herr_t retVal = 0;
+ info_all_t *datainfo = (info_all_t*)op_data;
+ H5O_info_t object_info;
+
+ retVal = H5Oget_info(loc_id, &object_info);
+ if (retVal < 0) {
+ *(datainfo->otype+datainfo->count) = -1;
+ *(datainfo->ltype+datainfo->count) = -1;
+ *(datainfo->objname+datainfo->count) = NULL;
+ *(datainfo->objno+datainfo->count) = (unsigned long)-1;
+ return 1;
+ } /* end if */
+ else {
+ *(datainfo->otype+datainfo->count) = object_info.type;
+ *(datainfo->ltype+datainfo->count) = info->type;
+ /* this will be freed by h5str_array_free(oName, n)*/
+ *(datainfo->objname+datainfo->count) = (char *)HDmalloc(HDstrlen(name)+1);
+ HDstrcpy(*(datainfo->objname+datainfo->count), name);
+ if(info->type==H5L_TYPE_HARD)
+ *(datainfo->objno+datainfo->count) = (unsigned long)info->u.address;
+ else
+ *(datainfo->objno+datainfo->count) = info->u.val_size;
+ } /* end else */
+ datainfo->count++;
+ if(datainfo->count < (int)datainfo->idxnum)
+ return 0;
+ else
+ return 1;
+} /* end obj_info_max */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5export_dataset
+ * Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;I)V
+ */
+JNIEXPORT void JNICALL
+Java_hdf_hdf5lib_H5_H5export_1dataset(JNIEnv *env, jclass cls, jstring file_export_name, jstring file_name, jstring object_path, jint binary_order)
+{
+ herr_t status = -1;
+ herr_t ret_val = -1;
+ hid_t file_id = -1;
+ hid_t dataset_id = -1;
+ FILE *stream;
+ const char *file_export;
+ const char *object_name;
+ const char *fileName;
+ jboolean isCopy2;
+
+ if (file_export_name == NULL) {
+ h5nullArgument(env, "HDF5Library_export_data: file_export_name is NULL");
+ } /* end if */
+ else if (object_path == NULL) {
+ h5nullArgument(env, "HDF5Library_export_data: object_path is NULL");
+ } /* end else if */
+ else {
+ PIN_JAVA_STRING0(file_name, fileName);
+
+ file_id = H5Fopen(fileName, (unsigned)H5F_ACC_RDWR, (hid_t)H5P_DEFAULT);
+
+ UNPIN_JAVA_STRING(file_name, fileName);
+
+ if (file_id < 0) {
+ /* throw exception */
+ h5libraryError(env);
+ } /* end if */
+ else {
+ object_name = ENVPTR->GetStringUTFChars(ENVPAR object_path, &isCopy2);
+ if (object_name == NULL) {
+ h5JNIFatalError( env, "H5Dopen: object name not pinned");
+ } /* end if */
+ else {
+ dataset_id = H5Dopen2(file_id, object_name, H5P_DEFAULT);
+
+ ENVPTR->ReleaseStringUTFChars(ENVPAR object_path, object_name);
+
+ if (dataset_id < 0) {
+ H5Fclose(file_id);
+ h5libraryError(env);
+ } /* end if */
+ else {
+ file_export = ENVPTR->GetStringUTFChars(ENVPAR file_export_name, 0);
+ stream = HDfopen(file_export, "w+");
+ ENVPTR->ReleaseStringUTFChars(ENVPAR file_export_name, file_export);
+
+ if (stream)
+ ret_val = h5str_dump_simple_dset(stream, dataset_id, binary_order);
+
+ if (stream)
+ HDfclose(stream);
+
+ H5Dclose(dataset_id);
+
+ H5Fclose(file_id);
+
+ if (ret_val < 0)
+ h5libraryError(env);
+ } /* end else */
+ } /* end else */
+ } /* end else */
+ } /* end else */
+} /* end Java_hdf_hdf5lib_H5_H5export_1dataset */
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/java/src/jni/h5util.h b/java/src/jni/h5util.h
new file mode 100644
index 0000000..fcf343d
--- /dev/null
+++ b/java/src/jni/h5util.h
@@ -0,0 +1,108 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ * http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifndef H5UTIL_H__
+#define H5UTIL_H__
+
+#include "h5jni.h"
+
+#ifndef SUCCEED
+#define SUCCEED 0
+#endif
+
+#ifndef FAIL
+#define FAIL (-1)
+#endif
+
+typedef struct h5str_t {
+ char *s;
+ size_t max; /* the allocated size of the string */
+} h5str_t;
+
+extern void h5str_new (h5str_t *str, size_t len);
+extern void h5str_free (h5str_t *str);
+extern void h5str_resize (h5str_t *str, size_t new_len);
+extern char* h5str_append (h5str_t *str, const char* cstr);
+extern size_t h5str_sprintf(h5str_t *str, hid_t container, hid_t tid, void *buf, int expand_data);
+extern void h5str_array_free(char **strs, size_t len);
+extern int h5str_dump_simple_dset(FILE *stream, hid_t dset, int binary_order);
+extern int h5str_dump_region_blocks_data(h5str_t *str, hid_t region, hid_t region_obj);
+extern int h5str_dump_region_points_data(h5str_t *str, hid_t region, hid_t region_obj);
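+
+/*
+ * Usage sketch for the h5str_t helpers above (illustrative only; "did",
+ * "tid" and "elem" stand for a dataset identifier, a datatype identifier
+ * and a buffer holding one element read from that dataset):
+ *
+ *     h5str_t str;
+ *
+ *     h5str_new(&str, 64);
+ *     h5str_sprintf(&str, did, tid, elem, 1);
+ *     printf("%s\n", str.s);
+ *     h5str_free(&str);
+ */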
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5AwriteVL
+ * Signature: (JJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5AwriteVL
+ (JNIEnv *, jclass, jlong, jlong, jobjectArray);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5AreadVL
+ * Signature: (JJ[Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5AreadVL
+ (JNIEnv *, jclass, jlong, jlong, jobjectArray);
+
+/*
+ * Copies the content of one attribute to another attribute
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Acopy
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Acopy
+ (JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Copies the content of one dataset to another dataset
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Dcopy
+ * Signature: (JJ)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Dcopy
+ (JNIEnv*, jclass, jlong, jlong);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Gget_obj_info_full
+ * Signature: (JLjava/lang/String;[Ljava/lang/String;[I[I[J[JIII)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Gget_1obj_1info_1full
+ (JNIEnv*, jclass, jlong, jstring, jobjectArray, jintArray, jintArray, jlongArray, jlongArray, jint, jint, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Gget_obj_info_max
+ * Signature: (J[Ljava/lang/String;[I[I[JJI)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Gget_1obj_1info_1max
+ (JNIEnv*, jclass, jlong, jobjectArray, jintArray, jintArray, jlongArray, jlong, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5export_dataset
+ * Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;I)V
+ */
+JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5export_1dataset
+ (JNIEnv*, jclass, jstring, jstring, jstring, jint);
+
+#endif /* H5UTIL_H__ */
diff --git a/java/src/jni/h5zImp.c b/java/src/jni/h5zImp.c
new file mode 100644
index 0000000..a60854d
--- /dev/null
+++ b/java/src/jni/h5zImp.c
@@ -0,0 +1,84 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ * http://hdfgroup.org/HDF5/doc/
+ *
+ */
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include "hdf5.h"
+#include <jni.h>
+#include <stdlib.h>
+#include "h5jni.h"
+#include "h5zImp.h"
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Zunregister
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Zunregister(JNIEnv *env, jclass clss, jint filter)
+{
+ herr_t retValue = H5Zunregister((H5Z_filter_t)filter);
+
+ if (retValue < 0)
+ h5libraryError(env);
+
+ return (jint)retValue;
+} /* end Java_hdf_hdf5lib_H5_H5Zunregister */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Zfilter_avail
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Zfilter_1avail(JNIEnv *env, jclass clss, jint filter)
+{
+ herr_t retValue = H5Zfilter_avail((H5Z_filter_t)filter);
+
+ if (retValue < 0)
+ h5libraryError(env);
+
+ return (jint)retValue;
+} /* end Java_hdf_hdf5lib_H5_H5Zfilter_1avail */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Zget_filter_info
+ * Signature: (I)I
+ */
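+/*
+ * The returned value is a filter configuration bit mask; callers typically
+ * test it against H5Z_FILTER_CONFIG_ENCODE_ENABLED and
+ * H5Z_FILTER_CONFIG_DECODE_ENABLED.
+ */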
+
+JNIEXPORT jint JNICALL
+Java_hdf_hdf5lib_H5_H5Zget_1filter_1info(JNIEnv *env, jclass clss, jint filter)
+{
+ unsigned int flags = 0;
+
+ if (H5Zget_filter_info ((H5Z_filter_t) filter, (unsigned *) &flags) < 0)
+ h5libraryError(env);
+
+ return (jint)flags;
+} /* end Java_hdf_hdf5lib_H5_H5Zget_1filter_1info */
+
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
diff --git a/java/src/jni/h5zImp.h b/java/src/jni/h5zImp.h
new file mode 100644
index 0000000..de0d6e6
--- /dev/null
+++ b/java/src/jni/h5zImp.h
@@ -0,0 +1,55 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_H5_H5Z */
+
+#ifndef _Included_hdf_hdf5lib_H5_H5Z
+#define _Included_hdf_hdf5lib_H5_H5Z
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Zunregister
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Zunregister
+ (JNIEnv *, jclass, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Zfilter_avail
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Zfilter_1avail
+ (JNIEnv *, jclass, jint);
+
+/*
+ * Class: hdf_hdf5lib_H5
+ * Method: H5Zget_filter_info
+ * Signature: (I)I
+ */
+
+JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Zget_1filter_1info
+ (JNIEnv *, jclass, jint);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_H5_H5Z */
diff --git a/java/src/jni/nativeData.c b/java/src/jni/nativeData.c
new file mode 100644
index 0000000..da86e09
--- /dev/null
+++ b/java/src/jni/nativeData.c
@@ -0,0 +1,1195 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * For details of the HDF libraries, see the HDF Documentation at:
+ * http://hdfgroup.org/HDF5/doc/
+ *
+ */
+/*
+ * This module contains the implementation of all the native methods
+ * used for number conversion. This is represented by the Java
+ * class HDFNativeData.
+ *
+ * These routines convert one-dimensional arrays of bytes into
+ * one-dimensional arrays of other types (int, float, etc.) and vice versa.
+ *
+ * These routines are called from the Java parts of the Java-C
+ * interface.
+ *
+ * ***Important notes:
+ *
+ * 1. These routines are designed to be portable--they use the
+ * C compiler to do the required native data manipulation.
+ * 2. These routines copy the data at least once -- a serious
+ * but unavoidable performance hit.
+ */
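+
+/*
+ * For example, the JNI entry point Java_hdf_hdf5lib_HDFNativeData_byteToInt___3B
+ * below backs the Java call HDFNativeData.byteToInt(byte[]), which decodes a
+ * byte array (typically filled by an HDF5 read) into an int[] in the native
+ * byte order of the machine.
+ */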
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+#include <jni.h>
+#include "hdf5.h"
+#include "h5jni.h"
+#include "nativeData.h"
+
+
+/* returns int [] */
+JNIEXPORT jintArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_byteToInt___3B(JNIEnv *env, jclass clss, jbyteArray bdata) /* IN: array of bytes */
+{
+ jbyte *barr;
+ jintArray rarray = NULL;
+ int blen;
+ jint *iarray;
+ jboolean bb;
+ char *bp;
+ jint *iap;
+ int ii;
+ int len;
+
+ if (bdata == NULL) {
+ h5nullArgument( env, "byteToInt: bdata is NULL?");
+ } /* end if */
+ else {
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h5JNIFatalError(env, "byteToInt: pin failed");
+ } /* end if */
+ else {
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+ len = blen/(int)sizeof(jint);
+ rarray = ENVPTR->NewIntArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5outOfMemory( env, "byteToInt" );
+ return NULL;
+ } /* end if */
+
+ iarray = ENVPTR->GetIntArrayElements(ENVPAR rarray,&bb);
+ if (iarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5JNIFatalError(env, "byteToInt: pin iarray failed");
+ return NULL;
+ } /* end if */
+
+ bp = (char *)barr;
+ iap = iarray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jint *)bp;
+ iap++;
+ bp += sizeof(jint);
+ } /* end for */
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR rarray,iarray, 0);
+ } /* end else */
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ } /* end else */
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_byteToInt___3B */
+
+/* returns float [] */
+JNIEXPORT jfloatArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_byteToFloat___3B(JNIEnv *env, jclass clss, jbyteArray bdata) /* IN: array of bytes */
+{
+ jbyte *barr;
+ jfloatArray rarray;
+ int blen;
+ jfloat *farray;
+ jboolean bb;
+ char *bp;
+ jfloat *iap;
+ int ii;
+ int len;
+
+ if (bdata == NULL) {
+ h5nullArgument( env, "byteToFloat: bdata is NULL?");
+ return NULL;
+ } /* end if */
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h5JNIFatalError(env, "byteToFloat: pin failed");
+ return NULL;
+ } /* end if */
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+ len = blen/(int)sizeof(jfloat);
+ rarray = ENVPTR->NewFloatArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5outOfMemory( env, "byteToFloat" );
+ return NULL;
+ } /* end if */
+ farray = ENVPTR->GetFloatArrayElements(ENVPAR rarray,&bb);
+ if (farray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5JNIFatalError(env, "byteToFloat: pin farray failed");
+ return NULL;
+ } /* end if */
+
+ bp = (char *)barr;
+ iap = farray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jfloat *)bp;
+ iap++;
+ bp += sizeof(jfloat);
+ } /* end for */
+
+ ENVPTR->ReleaseFloatArrayElements(ENVPAR rarray,farray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_byteToFloat___3B */
+
+/* returns short [] */
+JNIEXPORT jshortArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_byteToShort___3B(JNIEnv *env, jclass clss, jbyteArray bdata) /* IN: array of bytes */
+{
+ jbyte *barr;
+ jshortArray rarray;
+ int blen;
+ jshort *sarray;
+ jboolean bb;
+ char *bp;
+ jshort *iap;
+ int ii;
+ int len;
+
+ if (bdata == NULL) {
+ h5nullArgument( env, "byteToShort: bdata is NULL?");
+ return NULL;
+ } /* end if */
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h5JNIFatalError(env, "byteToShort: pin failed");
+ return NULL;
+ } /* end if */
+
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+ len = blen/(int)sizeof(jshort);
+ rarray = ENVPTR->NewShortArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5outOfMemory( env, "byteToShort" );
+ return NULL;
+ } /* end if */
+
+ sarray = ENVPTR->GetShortArrayElements(ENVPAR rarray,&bb);
+ if (sarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5JNIFatalError(env, "byteToShort: pin sarray failed");
+ return NULL;
+ } /* end if */
+
+ bp = (char *)barr;
+ iap = sarray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jshort *)bp;
+ iap++;
+ bp += sizeof(jshort);
+ } /* end for */
+
+ ENVPTR->ReleaseShortArrayElements(ENVPAR rarray,sarray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_byteToShort___3B */
+
+
+/* returns long [] */
+JNIEXPORT jlongArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_byteToLong___3B(JNIEnv *env, jclass clss, jbyteArray bdata) /* IN: array of bytes */
+{
+ jbyte *barr;
+ jlongArray rarray;
+ int blen;
+ jlong *larray;
+ jboolean bb;
+ char *bp;
+ jlong *iap;
+ int ii;
+ int len;
+
+ if (bdata == NULL) {
+ h5nullArgument( env, "byteToLong: bdata is NULL?");
+ return NULL;
+ } /* end if */
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h5JNIFatalError(env, "byteToLong: pin failed");
+ return NULL;
+ } /* end if */
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+ len = blen/(int)sizeof(jlong);
+ rarray = ENVPTR->NewLongArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5outOfMemory( env, "byteToLong" );
+ return NULL;
+ } /* end if */
+
+ larray = ENVPTR->GetLongArrayElements(ENVPAR rarray,&bb);
+ if (larray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5JNIFatalError(env, "byteToLong: pin larray failed");
+ return NULL;
+ } /* end if */
+
+ bp = (char *)barr;
+ iap = larray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jlong *)bp;
+ iap++;
+ bp += sizeof(jlong);
+ } /* end for */
+ ENVPTR->ReleaseLongArrayElements(ENVPAR rarray,larray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_byteToLong___3B */
+
+
+/* returns double [] */
+JNIEXPORT jdoubleArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_byteToDouble___3B(JNIEnv *env, jclass clss, jbyteArray bdata) /* IN: array of bytes */
+{
+ jbyte *barr;
+ jdoubleArray rarray;
+ int blen;
+ jdouble *darray;
+ jboolean bb;
+ char *bp;
+ jdouble *iap;
+ int ii;
+ int len;
+
+ if (bdata == NULL) {
+ h5nullArgument( env, "byteToDouble: bdata is NULL?");
+ return NULL;
+ } /* end if */
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h5JNIFatalError(env, "byteToDouble: pin failed");
+ return NULL;
+ } /* end if */
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+
+ len = blen/(int)sizeof(jdouble);
+ rarray = ENVPTR->NewDoubleArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5outOfMemory( env, "byteToDouble" );
+ return NULL;
+ } /* end if */
+
+ darray = ENVPTR->GetDoubleArrayElements(ENVPAR rarray,&bb);
+ if (darray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5JNIFatalError(env, "byteToDouble: pin darray failed");
+ return NULL;
+ } /* end if */
+
+ bp = (char *)barr;
+ iap = darray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jdouble *)bp;
+ iap++;
+ bp += sizeof(jdouble);
+ } /* end for */
+
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR rarray,darray,0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_byteToDouble___3B */
+
+
+/* returns int [] */
+JNIEXPORT jintArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_byteToInt__II_3B(JNIEnv *env, jclass clss, jint start, jint len, jbyteArray bdata) /* IN: array of bytes */
+{
+ char *bp;
+ jbyte *barr;
+ jintArray rarray;
+ int blen;
+ jint *iarray;
+ jint *iap;
+ int ii;
+ jboolean bb;
+
+ if (bdata == NULL) {
+ h5nullArgument( env, "byteToInt: bdata is NULL?");
+ return NULL;
+ } /* end if */
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h5JNIFatalError(env, "byteToInt: pin failed");
+ return NULL;
+ } /* end if */
+
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+ if ((start < 0) || ((int)(start + (len*(int)sizeof(jint))) > blen)) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5badArgument( env, "byteToInt: start or len is out of bounds");
+ return NULL;
+ } /* end if */
+
+ bp = (char *)barr + start;
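+ /* start is a byte offset into the pinned input; the conversion loop below
+ * reads jint values directly from this address, which may be unaligned. */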
+
+ rarray = ENVPTR->NewIntArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5outOfMemory( env, "byteToInt" );
+ return NULL;
+ } /* end if */
+
+ iarray = ENVPTR->GetIntArrayElements(ENVPAR rarray,&bb);
+ if (iarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5JNIFatalError(env, "byteToInt: pin iarray failed");
+ return NULL;
+ } /* end if */
+
+ iap = iarray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jint *)bp;
+ iap++;
+ bp += sizeof(jint);
+ } /* end for */
+
+ ENVPTR->ReleaseIntArrayElements(ENVPAR rarray,iarray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_byteToInt__II_3B */
+
+/* returns short [] */
+JNIEXPORT jshortArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_byteToShort__II_3B(JNIEnv *env, jclass clss, jint start, jint len, jbyteArray bdata) /* IN: array of bytes */
+{
+ char *bp;
+ jbyte *barr;
+ jshortArray rarray;
+ int blen;
+ jshort *iarray;
+ jshort *iap;
+ int ii;
+ jboolean bb;
+
+ if (bdata == NULL) {
+ h5nullArgument( env, "byteToShort: bdata is NULL?");
+ return NULL;
+ } /* end if */
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h5JNIFatalError( env, "byteToShort: getByte failed?");
+ return NULL;
+ } /* end if */
+
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+ if ((start < 0) || ((int)(start + (len*(int)sizeof(jshort))) > blen)) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5badArgument( env, "byteToShort: start or len is out of bounds");
+ return NULL;
+ } /* end if */
+
+ bp = (char *)barr + start;
+
+ rarray = ENVPTR->NewShortArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5outOfMemory( env, "byteToShort" );
+ return NULL;
+ } /* end if */
+
+ iarray = ENVPTR->GetShortArrayElements(ENVPAR rarray,&bb);
+ if (iarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5JNIFatalError( env, "byteToShort: getShort failed?");
+ return NULL;
+ } /* end if */
+
+ iap = iarray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jshort *)bp;
+ iap++;
+ bp += sizeof(jshort);
+ } /* end for */
+
+ ENVPTR->ReleaseShortArrayElements(ENVPAR rarray,iarray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_byteToShort__II_3B */
+
+/* returns float [] */
+JNIEXPORT jfloatArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_byteToFloat__II_3B(JNIEnv *env, jclass clss, jint start, jint len, jbyteArray bdata) /* IN: array of bytes */
+{
+ char *bp;
+ jbyte *barr;
+ jfloatArray rarray;
+ int blen;
+ jfloat *iarray;
+ jfloat *iap;
+ int ii;
+ jboolean bb;
+
+ if (bdata == NULL) {
+ h5nullArgument( env, "byteToFloat: bdata is NULL?");
+ return NULL;
+ } /* end if */
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h5JNIFatalError( env, "byteToFloat: getByte failed?");
+ return NULL;
+ } /* end if */
+
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+ if ((start < 0) || ((int)(start + (len*(int)sizeof(jfloat))) > blen)) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5badArgument( env, "byteToFloat: start or len is out of bounds");
+ return NULL;
+ } /* end if */
+
+ bp = (char *)barr + start;
+
+ rarray = ENVPTR->NewFloatArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5outOfMemory( env, "byteToFloat" );
+ return NULL;
+ } /* end if */
+
+ iarray = ENVPTR->GetFloatArrayElements(ENVPAR rarray,&bb);
+ if (iarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5JNIFatalError( env, "byteToFloat: getFloat failed?");
+ return NULL;
+ } /* end if */
+
+ iap = iarray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jfloat *)bp;
+ iap++;
+ bp += sizeof(jfloat);
+ } /* end for */
+
+ ENVPTR->ReleaseFloatArrayElements(ENVPAR rarray,iarray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_byteToFloat__II_3B */
+
+/* returns long [] */
+JNIEXPORT jlongArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_byteToLong__II_3B(JNIEnv *env, jclass clss, jint start, jint len, jbyteArray bdata) /* IN: array of bytes */
+{
+ char *bp;
+ jbyte *barr;
+ jlongArray rarray;
+ int blen;
+ jlong *iarray;
+ jlong *iap;
+ int ii;
+ jboolean bb;
+
+ if (bdata == NULL) {
+ h5nullArgument( env, "byteToLong: bdata is NULL?");
+ return NULL;
+ } /* end if */
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h5JNIFatalError( env, "byteToLong: getByte failed?");
+ return NULL;
+ } /* end if */
+
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+ if ((start < 0) || ((int)(start + (len*(int)sizeof(jlong))) > blen)) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5badArgument( env, "byteToLong: start or len is out of bounds");
+ return NULL;
+ } /* end if */
+
+ bp = (char *)barr + start;
+
+ rarray = ENVPTR->NewLongArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5outOfMemory( env, "byteToLong" );
+ return NULL;
+ } /* end if */
+
+ iarray = ENVPTR->GetLongArrayElements(ENVPAR rarray,&bb);
+ if (iarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5JNIFatalError( env, "byteToLong: getLong failed?");
+ return NULL;
+ } /* end if */
+
+ iap = iarray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jlong *)bp;
+ iap++;
+ bp += sizeof(jlong);
+ } /* end for */
+
+ ENVPTR->ReleaseLongArrayElements(ENVPAR rarray,iarray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_byteToLong__II_3B */
+
+/* returns double [] */
+JNIEXPORT jdoubleArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_byteToDouble__II_3B(JNIEnv *env, jclass clss, jint start, jint len, jbyteArray bdata) /* IN: array of bytes */
+{
+ char *bp;
+ jbyte *barr;
+ jdoubleArray rarray;
+ int blen;
+ jdouble *iarray;
+ jdouble *iap;
+ int ii;
+ jboolean bb;
+
+ if (bdata == NULL) {
+ h5nullArgument( env, "byteToDouble: bdata is NULL?");
+ return NULL;
+ } /* end if */
+ barr = ENVPTR->GetByteArrayElements(ENVPAR bdata,&bb);
+ if (barr == NULL) {
+ h5JNIFatalError( env, "byteToDouble: getByte failed?");
+ return NULL;
+ } /* end if */
+
+ blen = ENVPTR->GetArrayLength(ENVPAR bdata);
+ if ((start < 0) || ((int)(start + (len*(int)sizeof(jdouble))) > blen)) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5badArgument( env, "byteToDouble: start or len is out of bounds");
+ return NULL;
+ } /* end if */
+
+ bp = (char *)barr + start;
+
+ rarray = ENVPTR->NewDoubleArray(ENVPAR len);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5outOfMemory( env, "byteToDouble" );
+ return NULL;
+ } /* end if */
+
+ iarray = ENVPTR->GetDoubleArrayElements(ENVPAR rarray,&bb);
+ if (iarray == NULL) {
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+ h5JNIFatalError( env, "byteToDouble: getDouble failed?");
+ return NULL;
+ } /* end if */
+
+ iap = iarray;
+ for (ii = 0; ii < len; ii++) {
+ *iap = *(jdouble *)bp;
+ iap++;
+ bp += sizeof(jdouble);
+ } /* end for */
+
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR rarray,iarray, 0);
+ ENVPTR->ReleaseByteArrayElements(ENVPAR bdata,barr,JNI_ABORT);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_byteToDouble__II_3B */
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_intToByte__II_3I(JNIEnv *env, jclass clss, jint start, jint len, jintArray idata) /* IN: array of int */
+{
+ jint *ip;
+ jint *iarr;
+ int ilen;
+ jbyteArray rarray;
+ int blen;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ii;
+ int ij;
+ union things {
+ int ival;
+ char bytes[4];
+ } u;
+
+ if (idata == NULL) {
+ h5nullArgument( env, "intToByte: idata is NULL?");
+ return NULL;
+ } /* end if */
+
+ iarr = ENVPTR->GetIntArrayElements(ENVPAR idata,&bb);
+ if (iarr == NULL) {
+ h5JNIFatalError( env, "intToByte: getInt failed?");
+ return NULL;
+ } /* end if */
+
+ ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+ if ((start < 0) || (((start + len)) > ilen)) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5badArgument( env, "intToByte: start or len is out of bounds");
+ return NULL;
+ } /* end if */
+
+ ip = iarr + start;
+
+ blen = ilen * (int)sizeof(jint);
+ rarray = ENVPTR->NewByteArray(ENVPAR blen);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5outOfMemory( env, "intToByte" );
+ return NULL;
+ } /* end if */
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ ENVPTR->ReleaseIntArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5JNIFatalError( env, "intToByte: getByte failed?");
+ return NULL;
+ } /* end if */
+
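+ /* The union exposes the native-order bytes of each int value for the copy below. */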
+ bap = barray;
+ for (ii = 0; ii < len; ii++) {
+ u.ival = *ip++;
+ for (ij = 0; ij < sizeof(jint); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ } /* end for */
+ } /* end for */
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+ ENVPTR->ReleaseIntArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_intToByte__II_3I */
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_shortToByte__II_3S(JNIEnv *env, jclass clss, jint start, jint len, jshortArray idata) /* IN: array of short */
+{
+ jshort *ip;
+ jshort *iarr;
+ int ilen;
+ jbyteArray rarray;
+ int blen;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ii;
+ int ij;
+ union things {
+ short ival;
+ char bytes[4];
+ } u;
+
+ if (idata == NULL) {
+ h5nullArgument( env, "shortToByte: idata is NULL?");
+ return NULL;
+ } /* end if */
+ iarr = ENVPTR->GetShortArrayElements(ENVPAR idata,&bb);
+ if (iarr == NULL) {
+ h5JNIFatalError( env, "shortToByte: getShort failed?");
+ return NULL;
+ } /* end if */
+
+ ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+ if ((start < 0) || (((start + len)) > ilen)) {
+ ENVPTR->ReleaseShortArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5badArgument( env, "shortToByte: start or len is out of bounds");
+ return NULL;
+ } /* end if */
+
+ ip = iarr + start;
+
+ blen = ilen * (int)sizeof(jshort);
+ rarray = ENVPTR->NewByteArray(ENVPAR blen);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseShortArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5outOfMemory( env, "shortToByte" );
+ return NULL;
+ } /* end if */
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ ENVPTR->ReleaseShortArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5JNIFatalError( env, "shortToByte: getByte failed?");
+ return NULL;
+ } /* end if */
+
+ bap = barray;
+ for (ii = 0; ii < len; ii++) {
+ u.ival = *ip++;
+ for (ij = 0; ij < sizeof(jshort); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ } /* end for */
+ } /* end for */
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+ ENVPTR->ReleaseShortArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_shortToByte__II_3S */
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_floatToByte__II_3F(JNIEnv *env, jclass clss, jint start, jint len, jfloatArray idata) /* IN: array of float */
+{
+ jfloat *ip;
+ jfloat *iarr;
+ int ilen;
+ jbyteArray rarray;
+ int blen;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ii;
+ int ij;
+ union things {
+ float ival;
+ char bytes[4];
+ } u;
+
+ if (idata == NULL) {
+ h5nullArgument( env, "floatToByte: idata is NULL?");
+ return NULL;
+ } /* end if */
+ iarr = ENVPTR->GetFloatArrayElements(ENVPAR idata,&bb);
+ if (iarr == NULL) {
+ h5JNIFatalError( env, "floatToByte: getFloat failed?");
+ return NULL;
+ } /* end if */
+
+ ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+ if ((start < 0) || (((start + len)) > ilen)) {
+ ENVPTR->ReleaseFloatArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5badArgument( env, "floatToByte: start or len is out of bounds");
+ return NULL;
+ } /* end if */
+
+ ip = iarr + start;
+
+ blen = ilen * (int)sizeof(jfloat);
+ rarray = ENVPTR->NewByteArray(ENVPAR blen);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseFloatArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5outOfMemory( env, "floatToByte" );
+ return NULL;
+ } /* end if */
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ ENVPTR->ReleaseFloatArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5JNIFatalError( env, "floatToByte: getByte failed?");
+ return NULL;
+ } /* end if */
+
+ bap = barray;
+ for (ii = 0; ii < len; ii++) {
+ u.ival = *ip++;
+ for (ij = 0; ij < sizeof(jfloat); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ } /* end for */
+ } /* end for */
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+ ENVPTR->ReleaseFloatArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_floatToByte__II_3F */
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_doubleToByte__II_3D(JNIEnv *env, jclass clss, jint start, jint len, jdoubleArray idata) /* IN: array of double */
+{
+ jdouble *ip;
+ jdouble *iarr;
+ int ilen;
+ jbyteArray rarray;
+ int blen;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ii;
+ int ij;
+ union things {
+ double ival;
+ char bytes[8];
+ } u;
+
+ if (idata == NULL) {
+ h5nullArgument( env, "doubleToByte: idata is NULL?");
+ return NULL;
+ } /* end if */
+ iarr = ENVPTR->GetDoubleArrayElements(ENVPAR idata,&bb);
+ if (iarr == NULL) {
+ h5JNIFatalError( env, "doubleToByte: getDouble failed?");
+ return NULL;
+ } /* end if */
+
+ ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+ if ((start < 0) || (((start + len)) > ilen)) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5badArgument( env, "doubleToByte: start or len is out of bounds");
+ return NULL;
+ } /* end if */
+
+ ip = iarr + start;
+
+ blen = ilen * (int)sizeof(jdouble);
+ rarray = ENVPTR->NewByteArray(ENVPAR blen);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5outOfMemory( env, "doubleToByte" );
+ return NULL;
+ } /* end if */
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5JNIFatalError( env, "doubleToByte: getByte failed?");
+ return NULL;
+ } /* end if */
+
+ bap = barray;
+ for (ii = 0; ii < len; ii++) {
+ u.ival = *ip++;
+ for (ij = 0; ij < sizeof(jdouble); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ } /* end for */
+ } /* end for */
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+ ENVPTR->ReleaseDoubleArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_doubleToByte__II_3D */
+
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_longToByte__II_3J(JNIEnv *env, jclass clss, jint start, jint len, jlongArray idata) /* IN: array of long */
+{
+ jlong *ip;
+ jlong *iarr;
+ int ilen;
+ jbyteArray rarray;
+ int blen;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ii;
+ int ij;
+ union things {
+ jlong ival;
+ char bytes[8];
+ } u;
+
+ if (idata == NULL) {
+ h5nullArgument( env, "longToByte: idata is NULL?");
+ return NULL;
+ } /* end if */
+ iarr = ENVPTR->GetLongArrayElements(ENVPAR idata,&bb);
+ if (iarr == NULL) {
+ h5JNIFatalError( env, "longToByte: getLong failed?");
+ return NULL;
+ } /* end if */
+
+ ilen = ENVPTR->GetArrayLength(ENVPAR idata);
+ if ((start < 0) || (((start + len)) > ilen)) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5badArgument( env, "longToByte: start or len is out of bounds?\n");
+ return NULL;
+ } /* end if */
+
+ ip = iarr + start;
+
+ blen = ilen * (int)sizeof(jlong);
+ rarray = ENVPTR->NewByteArray(ENVPAR blen);
+ if (rarray == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5outOfMemory( env, "longToByte" );
+ return NULL;
+ } /* end if */
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ ENVPTR->ReleaseLongArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+ h5JNIFatalError( env, "longToByte: getByte failed?");
+ return NULL;
+ } /* end if */
+
+ bap = barray;
+ for (ii = 0; ii < len; ii++) {
+ u.ival = *ip++;
+ for (ij = 0; ij < sizeof(jlong); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ } /* end for */
+ } /* end for */
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+ ENVPTR->ReleaseLongArrayElements(ENVPAR idata,iarr,JNI_ABORT);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_longToByte__II_3J */
+
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_intToByte__I(JNIEnv *env, jclass clss, jint idata) /* IN: int */
+{
+ jbyteArray rarray;
+ jbyte *barray;
+ jbyte *bap;
+ int ij;
+ jboolean bb;
+ union things {
+ int ival;
+ char bytes[sizeof(int)];
+ } u;
+
+ rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jint));
+ if (rarray == NULL) {
+ h5outOfMemory( env, "intToByte" );
+ return NULL;
+ } /* end if */
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ h5JNIFatalError( env, "intToByte: getByte failed?");
+ return NULL;
+ } /* end if */
+
+ bap = barray;
+ u.ival = idata;
+ for (ij = 0; ij < sizeof(jint); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ } /* end for */
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,barray, 0);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_intToByte__I */
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_floatToByte__F(JNIEnv *env, jclass clss, jfloat idata) /* IN: float */
+{
+ jbyteArray rarray;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ij;
+ union things {
+ float ival;
+ char bytes[sizeof(float)];
+ } u;
+
+ rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jfloat));
+ if (rarray == NULL) {
+ h5outOfMemory( env, "floatToByte" );
+ return NULL;
+ } /* end if */
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ h5JNIFatalError( env, "floatToByte: getByte failed?");
+ return NULL;
+ } /* end if */
+
+ bap = barray;
+ u.ival = idata;
+ for (ij = 0; ij < sizeof(jfloat); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ } /* end for */
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_floatToByte__F */
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_shortToByte__S(JNIEnv *env, jclass clss, jshort idata) /* IN: short */
+{
+ jbyteArray rarray;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ij;
+ union things {
+ short ival;
+ char bytes[sizeof(short)];
+ } u;
+
+ rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jshort));
+ if (rarray == NULL) {
+ h5outOfMemory( env, "shortToByte" );
+ return NULL;
+ } /* end if */
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ h5JNIFatalError( env, "shortToByte: getShort failed?");
+ return NULL;
+ } /* end if */
+
+ bap = barray;
+ u.ival = idata;
+ for (ij = 0; ij < sizeof(jshort); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ } /* end for */
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_shortToByte__S */
+
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_doubleToByte__D(JNIEnv *env, jclass clss, jdouble idata) /* IN: double */
+{
+ jbyteArray rarray;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ij;
+ union things {
+ double ival;
+ char bytes[sizeof(double)];
+ } u;
+
+ rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jdouble));
+ if (rarray == NULL) {
+ h5outOfMemory( env, "doubleToByte" );
+ return NULL;
+ } /* end if */
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ h5JNIFatalError( env, "doubleToByte: getDouble failed?");
+ return NULL;
+ } /* end if */
+
+ bap = barray;
+ u.ival = idata;
+ for (ij = 0; ij < sizeof(jdouble); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ } /* end for */
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_doubleToByte__D */
+
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_longToByte__J(JNIEnv *env, jclass clss, jlong idata) /* IN: long */
+{
+ jbyteArray rarray;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ij;
+ union things {
+ jlong ival;
+ char bytes[sizeof(jlong)];
+ } u;
+
+ rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jlong));
+ if (rarray == NULL) {
+ h5outOfMemory( env, "longToByte" );
+ return NULL;
+ } /* end if */
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ h5JNIFatalError( env, "longToByte: getLong failed?");
+ return NULL;
+ } /* end if */
+
+ bap = barray;
+ u.ival = idata;
+ for (ij = 0; ij < sizeof(jlong); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ } /* end for */
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_longToByte__J */
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL
+Java_hdf_hdf5lib_HDFNativeData_byteToByte__B(JNIEnv *env, jclass clss, jbyte idata) /* IN: byte */
+{
+ jbyteArray rarray;
+ jbyte *barray;
+ jbyte *bap;
+ jboolean bb;
+ int ij;
+ union things {
+ jbyte ival;
+ char bytes[sizeof(jbyte)];
+ } u;
+
+ rarray = ENVPTR->NewByteArray(ENVPAR sizeof(jbyte));
+ if (rarray == NULL) {
+ h5outOfMemory( env, "byteToByte" );
+ return NULL;
+ } /* end if */
+
+ barray = ENVPTR->GetByteArrayElements(ENVPAR rarray,&bb);
+ if (barray == NULL) {
+ h5JNIFatalError( env, "byteToByte: getByte failed?");
+ return NULL;
+ } /* end if */
+
+ bap = barray;
+ u.ival = idata;
+ for (ij = 0; ij < sizeof(jbyte); ij++) {
+ *bap = u.bytes[ij];
+ bap++;
+ } /* end for */
+
+ ENVPTR->ReleaseByteArrayElements(ENVPAR rarray,(jbyte *)barray, 0);
+
+ return rarray;
+} /* end Java_hdf_hdf5lib_HDFNativeData_byteToByte__B */
+
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
diff --git a/java/src/jni/nativeData.h b/java/src/jni/nativeData.h
new file mode 100644
index 0000000..1af06b4
--- /dev/null
+++ b/java/src/jni/nativeData.h
@@ -0,0 +1,115 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <jni.h>
+/* Header for class hdf_hdf5lib_HDFNativeData */
+
+#ifndef _Included_hdf_hdf5lib_HDFNativeData
+#define _Included_hdf_hdf5lib_HDFNativeData
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+/* returns int [] */
+JNIEXPORT jintArray JNICALL Java_hdf_hdf5lib_HDFNativeData_byteToInt___3B
+(JNIEnv *, jclass, jbyteArray);
+
+/* returns float [] */
+JNIEXPORT jfloatArray JNICALL Java_hdf_hdf5lib_HDFNativeData_byteToFloat___3B
+(JNIEnv *, jclass, jbyteArray);
+
+/* returns short [] */
+JNIEXPORT jshortArray JNICALL Java_hdf_hdf5lib_HDFNativeData_byteToShort___3B
+(JNIEnv *, jclass, jbyteArray);
+
+/* returns long [] */
+JNIEXPORT jlongArray JNICALL Java_hdf_hdf5lib_HDFNativeData_byteToLong___3B
+(JNIEnv *, jclass, jbyteArray);
+
+/* returns double [] */
+JNIEXPORT jdoubleArray JNICALL Java_hdf_hdf5lib_HDFNativeData_byteToDouble___3B
+(JNIEnv *, jclass, jbyteArray);
+
+/* returns int [] */
+JNIEXPORT jintArray JNICALL Java_hdf_hdf5lib_HDFNativeData_byteToInt__II_3B
+(JNIEnv *, jclass, jint, jint, jbyteArray);
+
+/* returns short [] */
+JNIEXPORT jshortArray JNICALL Java_hdf_hdf5lib_HDFNativeData_byteToShort__II_3B
+(JNIEnv *, jclass, jint, jint, jbyteArray);
+
+/* returns float [] */
+JNIEXPORT jfloatArray JNICALL Java_hdf_hdf5lib_HDFNativeData_byteToFloat__II_3B
+(JNIEnv *, jclass, jint, jint, jbyteArray);
+
+/* returns long [] */
+JNIEXPORT jlongArray JNICALL Java_hdf_hdf5lib_HDFNativeData_byteToLong__II_3B
+(JNIEnv *, jclass, jint, jint, jbyteArray);
+
+/* returns double [] */
+JNIEXPORT jdoubleArray JNICALL Java_hdf_hdf5lib_HDFNativeData_byteToDouble__II_3B
+(JNIEnv *, jclass, jint, jint, jbyteArray);
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_hdf_hdf5lib_HDFNativeData_intToByte__II_3I
+(JNIEnv *, jclass, jint, jint, jintArray);
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_hdf_hdf5lib_HDFNativeData_shortToByte__II_3S
+(JNIEnv *, jclass, jint, jint, jshortArray);
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_hdf_hdf5lib_HDFNativeData_floatToByte__II_3F
+(JNIEnv *, jclass, jint, jint, jfloatArray);
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_hdf_hdf5lib_HDFNativeData_doubleToByte__II_3D
+(JNIEnv *, jclass, jint, jint, jdoubleArray);
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_hdf_hdf5lib_HDFNativeData_longToByte__II_3J
+(JNIEnv *, jclass, jint, jint, jlongArray);
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_hdf_hdf5lib_HDFNativeData_intToByte__I
+(JNIEnv *, jclass, jint);
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_hdf_hdf5lib_HDFNativeData_floatToByte__F
+(JNIEnv *, jclass, jfloat);
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_hdf_hdf5lib_HDFNativeData_shortToByte__S
+(JNIEnv *, jclass, jshort);
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_hdf_hdf5lib_HDFNativeData_doubleToByte__D
+(JNIEnv *, jclass, jdouble);
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_hdf_hdf5lib_HDFNativeData_longToByte__J
+(JNIEnv *, jclass, jlong);
+
+/* returns byte [] */
+JNIEXPORT jbyteArray JNICALL Java_hdf_hdf5lib_HDFNativeData_byteToByte__B
+(JNIEnv *, jclass, jbyte);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif /* __cplusplus */
+
+#endif /* _Included_hdf_hdf5lib_HDFNativeData */
diff --git a/java/test/CMakeLists.txt b/java/test/CMakeLists.txt
new file mode 100644
index 0000000..4ee059d
--- /dev/null
+++ b/java/test/CMakeLists.txt
@@ -0,0 +1,131 @@
+cmake_minimum_required (VERSION 3.1.0)
+PROJECT (HDF5_JAVA_TEST Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDF5_JAVA_JNI_BINARY_DIR}
+ ${HDF5_JAVA_HDF5_LIB_DIR}
+)
+
+set (HDF5_JAVA_TEST_SRCS
+ TestH5.java
+ TestH5E.java
+ TestH5Edefault.java
+ TestH5Eregister.java
+ TestH5Fparams.java
+ TestH5Fbasic.java
+ TestH5F.java
+ TestH5Gbasic.java
+ TestH5G.java
+ TestH5Giterate.java
+ TestH5Sbasic.java
+ TestH5S.java
+ TestH5Tparams.java
+ TestH5Tbasic.java
+ TestH5T.java
+ TestH5Dparams.java
+ TestH5D.java
+ TestH5Dplist.java
+ TestH5Lparams.java
+ TestH5Lbasic.java
+ TestH5Lcreate.java
+ TestH5R.java
+ TestH5P.java
+ TestH5PData.java
+ TestH5Pfapl.java
+ TestH5Pvirtual.java
+ TestH5Plist.java
+ TestH5A.java
+ TestH5Oparams.java
+ TestH5Obasic.java
+ TestH5Ocreate.java
+ TestH5Ocopy.java
+ TestH5PL.java
+ TestH5Z.java
+ TestAll.java
+)
+
+file (WRITE ${PROJECT_BINARY_DIR}/Manifest.txt
+"Main-Class: test.TestAll
+"
+)
+
+set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_LIB_DIR}/junit.jar;${HDF5_JAVA_LIB_DIR}/hamcrest-core.jar;${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_SIMPLE_JAR}")
+
+add_jar (${HDF5_JAVA_TEST_LIB_TARGET} MANIFEST ${PROJECT_BINARY_DIR}/Manifest.txt ${HDF5_JAVA_TEST_SRCS})
+
+get_target_property (${HDF5_JAVA_TEST_LIB_TARGET}_JAR_FILE ${HDF5_JAVA_TEST_LIB_TARGET} JAR_FILE)
+#install_jar (${HDF5_JAVA_TEST_LIB_TARGET} ${HJAVA_INSTALL_DATA_DIR}/tests tests)
+#get_target_property (${HDF5_JAVA_TEST_LIB_TARGET}_CLASSPATH ${HDF5_JAVA_TEST_LIB_TARGET} CLASSDIR)
+
+add_dependencies (${HDF5_JAVA_TEST_LIB_TARGET} ${HDF5_JAVA_HDF5_LIB_TARGET})
+set_target_properties (${HDF5_JAVA_TEST_LIB_TARGET} PROPERTIES FOLDER test/java)
+
+set (HDF_JAVA_TEST_FILES
+ JUnit-interface.txt
+ JUnit-interface.ert
+)
+
+foreach (h5_file ${HDF_JAVA_TEST_FILES})
+ set (dest "${PROJECT_BINARY_DIR}/${h5_file}")
+ #message (STATUS " Copying ${h5_file}")
+ add_custom_command (
+ TARGET ${HDF5_JAVA_TEST_LIB_TARGET}
+ POST_BUILD
+ COMMAND ${CMAKE_COMMAND}
+ ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/${h5_file} ${dest}
+ )
+endforeach (h5_file ${HDF_JAVA_TEST_FILES})
+
+add_custom_command (
+ TARGET ${HDF5_JAVA_TEST_LIB_TARGET}
+ POST_BUILD
+ COMMAND ${CMAKE_COMMAND}
+ ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/h5ex_g_iterate.orig ${PROJECT_BINARY_DIR}/h5ex_g_iterate.hdf
+)
+
+if (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+else (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+endif (WIN32)
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
+endforeach (CMAKE_INCLUDE_PATH)
+set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${HDF5_JAVA_TEST_LIB_TARGET}_JAR_FILE}")
+set (testfilter "OK (598 tests)")
+
+if (CMAKE_BUILD_TYPE MATCHES Debug)
+ set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=hdf5_java_debug;")
+endif (CMAKE_BUILD_TYPE MATCHES Debug)
+
+add_test (
+ NAME JUnit-interface-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ JUnit-interface.out
+ JUnit-interface.out.err
+)
+
+add_test (
+ NAME JUnit-interface
+ COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_TESTER=${CMAKE_Java_RUNTIME};${CMAKE_Java_RUNTIME_FLAGS}"
+ -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}"
+ -D "TEST_ARGS:STRING=${CMD_ARGS}-ea;org.junit.runner.JUnitCore"
+ -D "TEST_PROGRAM=test.TestAll"
+ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_OUTPUT_DIRECTORY}"
+ -D "TEST_FOLDER=${HDF5_BINARY_DIR}/java/test"
+ -D "TEST_OUTPUT=JUnit-interface.out"
+# -D "TEST_LOG_LEVEL=trace"
+ -D "TEST_EXPECT=0"
+ -D "SKIP_APPEND=1"
+ -D "TEST_MASK_ERROR=TRUE"
+ -D "TEST_FILTER:STRING=${testfilter}"
+ -D "TEST_REFERENCE=JUnit-interface.txt"
+ -P "${HDF_RESOURCES_DIR}/jrunTest.cmake"
+)
+set_tests_properties (JUnit-interface PROPERTIES DEPENDS "JUnit-interface-clearall-objects")
diff --git a/java/test/JUnit-interface.ert b/java/test/JUnit-interface.ert
new file mode 100644
index 0000000..5771a4f
--- /dev/null
+++ b/java/test/JUnit-interface.ert
@@ -0,0 +1,2 @@
+[main] INFO hdf.hdf5lib.H5 - HDF5 library: hdf5_java
+[main] INFO hdf.hdf5lib.H5 - successfully loaded from java.library.path
diff --git a/java/test/JUnit-interface.txt b/java/test/JUnit-interface.txt
new file mode 100644
index 0000000..12e67ad
--- /dev/null
+++ b/java/test/JUnit-interface.txt
@@ -0,0 +1,650 @@
+JUnit version 4.11
+.testJ2C
+.testIsSerializable
+.testH5garbage_collect
+.testH5error_off
+.serializeToDisk
+.testH5open
+.testH5check_version
+.testH5get_libversion
+.testH5set_free_list_limits
+.testH5Eregister_class_lib_name_null
+.testH5Eregister_class_version_null
+.testH5Eunregister_class_invalid_classid
+.testH5Eregister_class
+.testH5Eregister_class_cls_name_null
+.testH5EprintInt
+.testH5Eset_current_stack_invalid_stkid
+.testH5Eset_current_stack
+.testH5Eget_num
+.testH5Eclear
+.testH5Eprint
+.testH5Epush_null_name
+.testH5Eget_num_with_msg
+.testH5Eclear2_with_msg
+.testH5Epush_invalid_stkid
+.testH5Eget_current_stack
+.testH5Ecreate_stack
+.testH5Eget_msg_invalid_msgid
+.testH5Eauto_is_v2
+.testH5EclearInt
+.testH5Eauto_is_v2_invalid_stkid
+.testH5Eclose_msg_invalid_errid
+.testH5Eclose_stack_invalid_stackid
+.testH5Eget_class_name_invalid_classname
+.testH5Eget_num_invalid_stkid
+.testH5EprintInt_invalid_classid
+.testH5Epop
+.testH5Epop_invalid_stkid
+.testH5Eget_current_stack_pop
+.testH5Eget_class_name_invalid_classid
+.testH5Ecreate_msg_invalid_errid
+.testH5Eclear2_invalid_stkid
+.testH5Eprint2_invalid_classid
+.testH5EprintInt
+.testH5Eget_msg_major
+.testH5Eget_msg_minor
+.testH5Eget_msg
+.testH5Eget_num
+.testH5Epush
+.testH5Ewalk
+.testH5Eget_class_name
+.testH5Eget_num_with_msg
+.testH5Eclear2
+.testH5Eprint2
+.testH5Ecreate_msg_major
+.testH5Ecreate_msg_minor
+.testH5Ecreate_stack
+.testH5Ecreate_msg_name_null
+.testH5Eauto_is_v2
+.testH5EclearInt
+.testH5Epop
+.testH5Fcreate_null
+.testH5Fflush_local
+.testH5Fget_info
+.testH5Fmount_null
+.testH5Fcreate
+.testH5Fflush_global
+.testH5Funmount_null
+.testH5Fclose_negative
+.testH5Fopen_null
+.testH5Fis_hdf5_null
+.testH5Fis_hdf5_text
+.testH5Fget_mdc_size
+.testH5Fget_mdc_hit_rate
+.testH5Fis_hdf5
+.testH5Fget_freespace
+.testH5Fclose
+.testH5Fget_filesize
+.testH5Fcreate_EXCL
+.testH5Freopen_closed
+.testH5Freset_mdc_hit_rate_stats
+.testH5Fget_name
+.testH5Fcreate
+.testH5Fclear_elink_file_cache
+.testH5Fclose_twice
+.testH5Freopen
+.testH5Fopen_read_only
+.testH5Fget_access_plist
+.testH5Fget_obj_ids
+.testH5Fget_intent_rdwr
+.testH5Fget_access_plist_closed
+.testH5Fget_create_plist_closed
+.testH5Fget_intent_rdonly
+.testH5Fget_create_plist
+.testH5Fget_obj_count
+.testH5Gget_info_by_name_not_exists
+.testH5Gget_info_by_idx_not_exists
+.testH5Gget_info_by_name
+.testH5Gget_create_plist
+.testH5Gopen
+.testH5Gget_info_by_idx_null
+.testH5Gopen_not_exists
+.testH5Gclose
+.testH5Gcreate_anon
+.testH5Gcreate_null
+.testH5Gget_info_by_idx_fileid
+.testH5Gclose_invalid
+.testH5Gopen_invalid
+.testH5Gget_info_invalid
+.testH5Gcreate_invalid
+.testH5Gcreate_exists
+.testH5Gget_info_by_name_null
+.testH5Gget_info_by_name_invalid
+.testH5Gget_create_plist_invalid
+.testH5Gcreate
+.testH5Gget_info_by_name_fileid
+.testH5Gget_info_by_idx_invalid
+.testH5Gopen_null
+.testH5Gget_info_by_idx
+.testH5Gget_info
+.testH5Gget_info_by_name
+.testH5Gget_create_plist
+.testH5Gopen
+.testH5Gget_obj_info_all_gid2
+.testH5Gget_obj_info_all_byIndexType
+.testH5Gget_obj_info_max_limit
+.testH5Gget_obj_info_all
+.testH5Gget_obj_info_max
+.testH5Gget_obj_info_all_gid
+.testH5Gget_info_by_idx
+.testH5Gget_info
+.testH5Gget_obj_info_all
+.testH5Sclose_invalid
+.testH5Screate_simple_max_default
+.testH5Screate_simple_dims_null
+.testH5Sdecode_null
+.testH5Screate_simple_dims_exceed
+.testH5Screate_simple_unlimted_1d
+.testH5Screate_simple_dims_invalid
+.testH5Screate_scalar
+.testH5Screate_simple
+.testH5Screate_simple_rank_invalid
+.testH5Sget_simple_extent_type_invalid
+.testH5Sencode_invalid
+.testH5Screate_null
+.testH5Screate_simple_extent
+.testH5Screate_invalid
+.testH5Screate_simple_unlimted
+.testH5Sget_select_npoints
+.testH5Sget_select_type
+.testH5Sset_extent_simple
+.testH5Sget_select_hyper
+.testH5Sget_select_valid
+.testH5Sget_select_elem_pointlist
+.testH5Sset_extent_none
+.testH5Sencode_decode_scalar_dataspace
+.testH5Soffset_simple
+.testH5Scopy
+.testH5Sget_simple_extent_ndims
+.testH5Sextent_equal
+.testH5Sget_simple_extent_dims
+.testH5Sget_simple_extent_type
+.testH5Shyper_regular
+.testH5Sget_select_bounds
+.testH5Sget_select_elem_pointlist_invalid
+.testH5Sget_simple_extent_npoints
+.testH5Sextent_copy
+.testH5Sencode_decode_null_dataspace
+.testH5Sis_simple
+.testH5Sget_simple_extent_dims_null
+.testH5Sselect_none
+.testH5Tget_member_type_invalid
+.testH5Tenum_insert_null
+.testH5Tget_offset_invalid
+.testH5Tset_precision_invalid
+.testH5Tget_inpad_invalid
+.testH5Tenum_nameof_invalid
+.testH5Tget_member_value_invalid
+.testH5Tenum_nameof_value_null
+.testH5Tcreate_invalid
+.testH5Tget_strpad_invalid
+.testH5Tenum_valueof_invalid
+.testH5Tget_fields_null
+.testH5Topen_null
+.testH5Tpack_invalid
+.testH5Tcommit_null
+.testH5Tinsert_invalid
+.testH5Tenum_valueof_null
+.testH5Tset_norm_invalid
+.testH5Tlock_invalid
+.testH5Tarray_create_invalid
+.testH5Tget_member_value_null
+.testH5Tset_offset_invalid
+.testH5Tget_fields_invalid
+.testH5Tequal_invalid
+.testH5Tget_ebias_long_invalid
+.testH5Tget_cset_invalid
+.testH5Tget_size_invalid
+.testH5Tset_strpad_invalid
+.testH5Tset_ebias_invalid
+.testH5Tget_sign_invalid
+.testH5Tget_member_index_invalid
+.testH5Tget_precision_invalid
+.testH5Tset_fields_invalid
+.testH5Tcopy_invalid
+.testH5Tget_pad_invalid
+.testH5Tset_order_invalid
+.testH5Tget_member_class_invalid
+.testH5Tget_super_invalid
+.testH5Tget_class_invalid
+.testH5Topen_invalid
+.testH5Tget_precision_long_invalid
+.testH5Tget_ebias_invalid
+.testH5Tget_native_type_invalid
+.testH5Tget_fields_length_invalid
+.testH5Tget_norm_invalid
+.testH5Tenum_nameof_invalid_size
+.testH5Tset_pad_invalid
+.testH5Tget_pad_null
+.testH5Tset_tag_null
+.testH5Tget_order_invalid
+.testH5Tcommit_invalid
+.testH5Tget_array_ndims_invalid
+.testH5Tset_tag_invalid
+.testH5Tvlen_create_invalid
+.testH5Tenum_create_invalid
+.testH5Tinsert_null
+.testH5Tset_inpad_invalid
+.testH5Tenum_valueof_name_null
+.testH5Tset_cset_invalid
+.testH5Tclose_invalid
+.testH5Tget_nmembers_invalid
+.testH5Tarray_create_value_null
+.testH5Tset_size_invalid
+.testH5Tenum_insert_invalid
+.testH5Tget_array_dims_null
+.testH5Tget_member_index_null
+.testH5Tset_sign_invalid
+.testH5Tenum_insert_name_null
+.testH5Tequal_not
+.testH5Tcopy
+.testH5Tequal
+.testH5Torder_size
+.testH5Tconvert
+.testH5Tvlen_create
+.testH5Tenum_create_functions
+.testH5Tenum_functions
+.testH5Tget_class
+.testH5Tget_array_ndims
+.testH5Tequal_type_error
+.testH5Tget_array_dims
+.testH5Tset_size
+.testH5Tis_variable_str
+.testH5Tcompound_functions
+.testH5Tget_size
+.testH5Tarray_create
+.testH5Topaque_functions
+.testH5Dget_space_status_invalid
+.testH5Dget_access_plist_invalid
+.testH5Dget_type_invalid
+.testH5Dget_create_plist_invalid
+.testH5Dget_offset_invalid
+.testH5Dvlen_get_buf_size_invalid
+.testH5Dcreate_null
+.testH5Dset_extent_status_null
+.testH5Dvlen_reclaim_null
+.testH5Dcreate_invalid
+.testH5Dcreate_anon_invalid
+.testH5Dset_extent_status_invalid
+.testH5Dvlen_reclaim_invalid
+.testH5Dopen_invalid
+.testH5Dclose_invalid
+.testH5Dget_storage_size_invalid
+.testH5Dget_space_invalid
+.testH5Dopen_null
+.testH5Dget_storage_size
+.testH5Diterate_write
+.testH5Dcreate
+.testH5Dget_offset
+.testH5Dget_type
+.testH5Dfill
+.testH5Dopen
+.testH5Dcreate_anon
+.testH5Dfill_null
+.testH5Dget_storage_size_empty
+.testH5Diterate
+.testH5Dget_access_plist
+.testH5Dvlen_read_invalid_buffer
+.testH5Dvlen_get_buf_size
+.testH5Dget_space_closed
+.testH5Dget_space_status
+.testH5Dvlen_write_read
+.testH5Dget_space
+.testH5Dget_type_closed
+.testH5Dset_extent
+.testH5Lcopy_invalid
+.testH5Lget_value_by_idx_null
+.testH5Lcreate_external_invalid
+.testH5Lexists_null
+.testH5Lget_info_invalid
+.testH5Lget_name_by_idx_invalid
+.testH5Lmove_null_current
+.testH5Literate_by_name_nullname
+.testH5Lvisit_by_name_nullname
+.testH5Lvisit_null
+.testH5Lget_name_by_idx_null
+.testH5Lcreate_hard_null_dest
+.testH5Lget_value_null
+.testH5Lcreate_external_null_dest
+.testH5Lcreate_external_null_file
+.testH5Lcreate_external_null_current
+.testH5Ldelete_null
+.testH5Lexists_invalid
+.testH5Lmove_invalid
+.testH5Lcreate_hard_invalid
+.testH5Lcopy_null_dest
+.testH5Lcreate_soft_null_current
+.testH5Lcopy_null_current
+.testH5Lget_info_by_idx_null
+.testH5Literate_null
+.testH5Ldelete_invalid
+.testH5Lvisit_by_name_null
+.testH5Ldelete_by_idx_invalid
+.testH5Lget_info_by_idx_invalid
+.testH5Ldelete_by_idx_null
+.testH5Lcreate_soft_invalid
+.testH5Lcreate_hard_null_current
+.testH5Lget_value_by_idx_invalid
+.testH5Lmove_null_dest
+.testH5Lget_info_null
+.testH5Literate_by_name_null
+.testH5Lcreate_soft_null_dest
+.testH5Lget_value_invalid
+.testH5Lget_info_by_idx_not_exist_name
+.testH5Lget_name_by_idx_not_exist
+.testH5Lvisit
+.testH5Lget_name_by_idx_n0
+.testH5Lget_name_by_idx_n3
+.testH5Lvisit_by_name
+.testH5Literate_by_name
+.testH5Lget_info_hardlink
+.testH5Literate
+.testH5Lget_info_by_idx_n0
+.testH5Lget_info_by_idx_n3
+.testH5Lget_info_by_idx_name_not_exist_create
+.testH5Lexists
+.testH5Lget_info_by_idx_name_not_exist_name
+.testH5Lget_info_by_idx_not_exist_create
+.testH5Lget_info_not_exist
+.testH5Lget_info_dataset
+.testH5Lget_info_by_idx_n0_create
+.testH5Ldelete_soft_link_dangle
+.testH5Lget_value_by_idx_external_create
+.testH5Ldelete_by_idx_not_exist_create
+.testH5Lvisit_create
+.testH5Lmove_dst_link_exists
+.testH5Lcreate_soft_dangle
+.testH5Literate_create
+.testH5Lcopy_cur_not_exists
+.testH5Lcopy
+.testH5Lmove
+.testH5Lget_value_by_idx_n2_create
+.testH5Lget_value_soft
+.testH5Ldelete_by_idx_n2_name
+.testH5Lget_info_by_idx_n1_create
+.testH5Lcreate_external
+.testH5Lget_value_dangle
+.testH5Lcreate_hard_dst_link_exists
+.testH5Lget_value_by_idx_n2_name
+.testH5Lcreate_soft_dst_link_exists
+.testH5Lcreate_hard
+.testH5Lcreate_soft
+.testH5Lmove_cur_not_exists
+.testH5Lcreate_hard_cur_not_exists
+.testH5Lget_info_softlink_dangle
+.testH5Ldelete_by_idx_n2_create
+.testH5Ldelete_soft_link
+.testH5Lget_info_externallink
+.testH5Lcopy_dst_link_exists
+.testH5Lget_value_by_idx_external_name
+.testH5Ldelete_by_idx_not_exist_name
+.testH5Lget_info_softlink
+.testH5Lget_value_external
+.testH5Lget_value_by_idx_not_exist_create
+.testH5Lget_value_by_idx_not_exist_name
+.testH5Ldelete_hard_link
+.testH5Rgetregion_Nullreference
+.testH5Rget_obj_type2_Invalidreftype
+.testH5Rdereference
+.testH5Rget_name
+.testH5Rcreate_Invalidreftype
+.testH5Rget_name_NULLreference
+.testH5Rget_region
+.testH5Rdereference_Nullreference
+.testH5Rcreate_refobj
+.testH5Rcreate_Invalidspace_id
+.testH5Rdereference_Invalidreference
+.testH5Rgetregion_Badreferencetype
+.testH5Rcreate_regionrefobj
+.testH5Rget_name_Invalidreftype
+.testH5Rgetregion_Invalidreftype
+.testH5Rget_obj_type2
+.testH5Rcreate_InvalidObjectName
+.testH5Pset_nbit
+.testH5Pset_shared_mesg_index_InvalidFlag
+.testH5Pset_shared_mesg_phase_change
+.testH5PH5Pset_shared_mesg_phase_change_HighMaxlistValue
+.testH5P_layout
+.testH5Pget_link_creation_order
+.testH5Pset_shared_mesg_nindexes_InvalidHIGHnindexes
+.testH5Pget_shared_mesg_index_Invalid_indexnum
+.testH5Pset_data_transform_NullExpression
+.testH5Pset_elink_prefix_null
+.testH5Pget_elink_prefix
+.testH5Pget_nlinks
+.testH5Pset_libver_bounds_invalidhigh
+.testH5Pget_char_encoding
+.testH5P_istore_k
+.testH5Pget_link_phase_change
+.testH5Pset_link_phase_change_max_compactLESSTHANmin_dense
+.testH5Pget_shared_mesg_phase_change_EqualsSET
+.testH5Pset_scaleoffset_Invalidscale_type
+.testH5Pget_istore_k_null
+.testH5Pset_libver_bounds_invalidlow
+.testH5Pset_est_link_info
+.testH5Pget_link_phase_change_Null
+.testH5P_fill_time
+.testH5Pget_userblock_null
+.testH5Pset_link_creation_order_tracked
+.testH5Pset_shared_mesg_index
+.testH5Pset_copy_object
+.testH5Pset_link_creation_order_trackedPLUSindexed
+.testH5Pset_copy_object_invalidobject
+.testH5Pset_est_link_info_InvalidValues
+.testH5P_file_space
+.testH5Pset_local_heap_size_hint
+.testH5Pget_est_link_info
+.testH5Pset_scaleoffset
+.testH5Pset_create_intermediate_group_invalidobject
+.testH5PH5Pset_shared_mesg_phase_change_HighMinbtreeValue
+.testH5Pset_create_intermediate_group
+.testH5P_alloc_time
+.testH5Pset_elink_acc_flags
+.testH5Pset_link_phase_change_Highmax_Compact
+.testH5P_chunk
+.testH5P_sizes
+.testH5Pset_link_creation_order_invalidvalue
+.testH5P_sym_k
+.testH5PH5Pset_shared_mesg_phase_change_MinbtreeGreaterThanMaxlist
+.testH5Pget_version_null
+.testH5Pset_scaleoffset_Invalidscale_factor
+.testH5Pget_elink_prefix_null
+.testH5Pget_data_transform_IllegalSize
+.testH5Pget_create_intermediate_group
+.testH5Pset_shared_mesg_nindexes
+.testH5Pset_attr_creation_order_trackedPLUSindexed
+.testH5Pget_sym_k_null
+.testH5Pset_nlinks
+.testH5P_obj_track_times
+.testH5P_userblock
+.testH5Pget_local_heap_size_hint
+.testH5Pset_shared_mesg_index_Invalid_indexnum
+.testH5Pset_data_transform_InvalidExpression1
+.testH5Pset_data_transform_InvalidExpression2
+.testH5Pget_attr_phase_change
+.testH5Pget_data_transform
+.testH5Pget_create_intermediate_group_notcreated
+.testH5Pset_elink_prefix
+.testH5Pget_attr_creation_order
+.testH5Pset_attr_creation_order_invalidvalue
+.testH5Pget_shared_mesg_phase_change
+.testH5Pget_shared_mesg_index
+.testH5Pset_link_phase_change
+.testH5Pget_shared_mesg_nindexes
+.testH5Pget_version
+.testH5Pset_elink_acc_flags_InvalidFlag1
+.testH5Pset_elink_acc_flags_InvalidFlag2
+.testH5Pget_link_phase_change_EqualsSet
+.testH5Pget_elink_acc_flags
+.testH5Pget_data_transform_ExpressionNotSet
+.testH5P_fill_value
+.testH5Pget_sizes_null
+.testH5Pset_data_transform
+.testH5Pset_attr_creation_order_tracked
+.testH5P_buffer
+.testH5Pdata_transform
+.testH5P_elink_fapl
+.testH5P_fapl_direct
+.testH5P_alignment
+.testH5P_fapl_family
+.testH5P_chunk_cache
+.testH5P_meta_block_size
+.testH5Pget_elink_fapl
+.testH5Pset_mdc_config
+.testH5P_small_data_block_size
+.testH5Pset_fapl_log
+.testH5Pset_libver_bounds
+.testH5P_sieve_buf_size
+.testH5P_elink_file_cache_size
+.testH5P_cache
+.testH5Pget_mdc_config
+.testH5P_fapl_muti_defaults
+.testH5Pget_libver_bounds
+.testH5P_btree_ratios
+.testH5P_fapl_muti_nulls
+.testH5Pset_fapl_sec2
+.testH5Pmulti_transform
+.testH5Pset_elink_fapl_NegativeID
+.testH5Pset_fapl_stdio
+.testH5P_edc_check
+.testH5Pset_elink_fapl
+.testH5P_hyper_vector_size
+.testH5P_gc_references
+.testH5P_family_offset
+.testH5P_fapl_core
+.testH5P_fapl_muti
+.testH5P_fapl_split
+.testH5Pset_fapl_windows
+.testH5P_fclose_degree
+.testH5Pget_source_datasetname
+.testH5Pvirtual_storage
+.testH5Pget_selection_source_dataset
+.testH5Pget_source_filename
+.testH5Pget_virtual_count
+.testH5Pset_get_virtual_view
+.testH5Pget_mapping_parameters
+.testH5P_genprop_basic_class
+.testH5P_genprop_class_iter
+.testH5P_genprop_basic_class_prop
+.testH5P_genprop_basic_list_prop
+.testH5Acreate2_nullname
+.testH5Acreate_by_name
+.testH5Aget_name_by_idx
+.testH5Aget_storage_size
+.testH5Aiterate
+.testH5Aopen_by_idx
+.testH5Aopen_invalidname
+.testH5Aopen
+.testH5Aget_info_by_name
+.testH5Aget_create_plist
+.testH5Adelete_by_name
+.testH5Aopen_by_name
+.testH5Aget_info
+.testH5Aget_name
+.testH5Aexists
+.testH5Aget_info_by_idx
+.testH5Arename
+.testH5Adelete_by_idx_name1
+.testH5Adelete_by_idx_name2
+.testH5Adelete_by_idx_order
+.testH5Arename_by_name
+.testH5Acreate2_invalidobject
+.testH5Acreate2
+.testH5Aiterate_by_name
+.testH5Adelete_by_idx_null
+.testH5Adelete_by_idx_invalidobject
+.testH5Awrite_readVL
+.testH5Aget_info1
+.testH5Oget_comment_by_name_null
+.testH5Ovisit_by_name_nullname
+.testH5Oget_info_invalid
+.testH5Ovisit_by_name_null
+.testH5Oget_comment_invalid
+.testH5Oset_comment_by_name_invalid
+.testH5Oopen_null
+.testH5Oclose_invalid
+.testH5Oget_comment_by_name_invalid
+.testH5Ocopy_null_dest
+.testH5Olink_invalid
+.testH5Oget_info_by_idx_invalid
+.testH5Oget_info_by_idx_null
+.testH5Olink_null_dest
+.testH5Oget_info_by_name_invalid
+.testH5Oget_info_by_name_null
+.testH5Ocopy_invalid
+.testH5Oset_comment_by_name_null
+.testH5Ocopy_null_current
+.testH5Oset_comment_invalid
+.testH5Oopen_invalid
+.testH5Ovisit_null
+.testH5Oexists_by_name
+.testH5Oget_info_by_idx_n0
+.testH5Oget_info_by_idx_n3
+.testH5Oget_info_by_name_not_exist_name
+.testH5Ovisit_by_name
+.testH5Oget_info_by_idx_name_not_exist_name
+.testH5Oget_info_datatype
+.testH5Oget_info_by_idx_not_exist_name
+.testH5Oopen_by_idx_n0
+.testH5Oopen_by_idx_n3
+.testH5Oopen_not_exists
+.testH5Ovisit
+.testH5Oget_info_by_idx_not_exist_create
+.testH5Oget_info_by_name_hardlink
+.testH5Oget_info_by_name_group
+.testH5Oopen_by_addr
+.testH5Oget_info_by_name_not_exists
+.testH5Oget_info_by_name_dataset
+.testH5Oget_info_group
+.testH5Oget_info_by_name_datatype
+.testH5Oget_info_hardlink
+.testH5Oget_info_by_idx_name_not_exist_create
+.testH5Oget_info_dataset
+.testH5OcopyRefsDatasettosameFile
+.testH5OcopyRefsDatasettodiffFile
+.testH5OcopyRefsAttr
+.testH5Oget_info_by_idx_n0_create
+.testH5Oget_info_softlink_dangle
+.testH5Oget_info_softlink
+.testH5Oget_info_externallink
+.testH5Ocopy
+.testH5Olink
+.testH5Ocomment_by_name
+.testH5Oget_info_by_idx_n1_create
+.testH5Ocomment
+.testH5Oinc_dec_count
+.testH5Ocomment_by_name_clear
+.testH5Ovisit_create
+.testH5Ocopy_dst_link_exists
+.testH5Ocomment_clear
+.testH5Ocopy_cur_not_exists
+.TestH5PLplugins
+.testH5Zfilter_avail
+.testH5Zunregister_predefined
+.testH5Zget_filter_info
+
+Time: XXXX
+
+OK (624 tests)
+
+HDF5-DIAG: Error detected in HDF5 (version (number)) thread (IDs):
+ #000: (file name) line (number) in H5Fopen(): can't set access and transfer property lists
+ major: File accessibilty
+ minor: Can't set value
+ #001: (file name) line (number) in H5P_verify_apl_and_dxpl(): not the required access property list
+ major: Property lists
+ minor: Inappropriate type
+ #002: (file name) line (number) in H5P_isa_class(): not a property list
+ major: Invalid arguments to routine
+ minor: Inappropriate type
+HDF5-DIAG: Error detected in HDF5 (version (number)) thread (IDs):
+ #000: (file name) line (number) in H5Fopen(): can't set access and transfer property lists
+ major: File accessibilty
+ minor: Can't set value
+ #001: (file name) line (number) in H5P_verify_apl_and_dxpl(): not the required access property list
+ major: Property lists
+ minor: Inappropriate type
+ #002: (file name) line (number) in H5P_isa_class(): not a property list
+ major: Invalid arguments to routine
+ minor: Inappropriate type
diff --git a/java/test/Makefile.am b/java/test/Makefile.am
new file mode 100644
index 0000000..9f29f1c
--- /dev/null
+++ b/java/test/Makefile.am
@@ -0,0 +1,103 @@
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+# HDF5 Java native interface (JNI) Library Test Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+JAVAROOT = .classes
+
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+pkgpath = test
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+jarfile = jar$(PACKAGE_TARNAME)test.jar
+
+CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$(top_srcdir)/java/lib/junit.jar:$(top_srcdir)/java/lib/hamcrest-core.jar:$(top_srcdir)/java/lib/slf4j-api-1.7.5.jar:$(top_srcdir)/java/lib/ext/slf4j-simple-1.7.5.jar:$$CLASSPATH
+AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
+
+noinst_JAVA = \
+ TestH5.java \
+ TestH5E.java \
+ TestH5Edefault.java \
+ TestH5Eregister.java \
+ TestH5Fparams.java \
+ TestH5Fbasic.java \
+ TestH5F.java \
+ TestH5Gbasic.java \
+ TestH5G.java \
+ TestH5Giterate.java \
+ TestH5Sbasic.java \
+ TestH5S.java \
+ TestH5Tparams.java \
+ TestH5Tbasic.java \
+ TestH5T.java \
+ TestH5Dparams.java \
+ TestH5D.java \
+ TestH5Dplist.java \
+ TestH5Lparams.java \
+ TestH5Lbasic.java \
+ TestH5Lcreate.java \
+ TestH5R.java \
+ TestH5P.java \
+ TestH5PData.java \
+ TestH5Pfapl.java \
+ TestH5Pvirtual.java \
+ TestH5Plist.java \
+ TestH5A.java \
+ TestH5Oparams.java \
+ TestH5Obasic.java \
+ TestH5Ocreate.java \
+ TestH5Ocopy.java \
+ TestH5PL.java \
+ TestH5Z.java \
+ TestAll.java
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+noinst_DATA = $(jarfile)
+
+.PHONY: classes
+
+check_SCRIPTS = junit.sh
+TEST_SCRIPT = $(check_SCRIPTS)
+
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class junit.sh
+
+#JAVA_JUNIT = $(JAVA_SRCS)
+#noinst_JAVA = @JAVA_JUNIT@
+#EXTRA_JAVA = $(JAVA_JUNIT)
+TESTS_JUNIT = AllJunitTests
+#EXTRA_TEST = $(TESTS_JUNIT)
+
+AllJunitTests :
+ echo "#! /bin/sh" > $@
+ echo "exec @JUNIT@ test.TestAll" >> $@
+ chmod +x $@
+
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
+
+include $(top_srcdir)/config/conclude.am
diff --git a/java/test/TestAll.java b/java/test/TestAll.java
new file mode 100644
index 0000000..e3abe21
--- /dev/null
+++ b/java/test/TestAll.java
@@ -0,0 +1,40 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+
+@RunWith(Suite.class)
+@Suite.SuiteClasses( { TestH5.class,
+ TestH5Eregister.class,
+ TestH5Edefault.class,
+ TestH5E.class,
+ TestH5Fparams.class, TestH5Fbasic.class, TestH5F.class,
+ TestH5Gbasic.class, TestH5G.class, TestH5Giterate.class,
+ TestH5Sbasic.class, TestH5S.class,
+ TestH5Tparams.class, TestH5Tbasic.class, TestH5T.class,
+ TestH5Dparams.class, TestH5D.class, TestH5Dplist.class,
+ TestH5Lparams.class, TestH5Lbasic.class, TestH5Lcreate.class,
+ TestH5R.class,
+ TestH5P.class, TestH5PData.class, TestH5Pfapl.class, TestH5Pvirtual.class, TestH5Plist.class,
+ TestH5A.class,
+ TestH5Oparams.class, TestH5Obasic.class, TestH5Ocopy.class, TestH5Ocreate.class,
+ TestH5PL.class, TestH5Z.class
+})
+
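+/**
+ * Aggregate JUnit 4 suite: running this class (for example with
+ * "java org.junit.runner.JUnitCore test.TestAll", given the HDF5 and JUnit
+ * jars on the classpath) executes every test class listed in the
+ * annotation above, in the order given.
+ */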
+public class TestAll {
+}
diff --git a/java/test/TestH5.java b/java/test/TestH5.java
new file mode 100644
index 0000000..1a78bea
--- /dev/null
+++ b/java/test/TestH5.java
@@ -0,0 +1,257 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+/**
+ * @author xcao
+ *
+ */
+public class TestH5 {
+ @Rule public TestName testname = new TestName();
+ @Before
+ public void showTestName() {
+ System.out.print(testname.getMethodName());
+ }
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ /**
+ * Test method for {@link hdf.hdf5lib.H5#J2C(int)}.
+ * NOTE:
+ * H5F_ACC_DEBUG no longer prints any special debug info. The symbol is
+ * being retained and will be listed as deprecated in HDF5 1.10.0.
+ */
+ @Test
+ public void testJ2C() {
+ int H5F_ACC_RDONLY = 0x0000;
+ int H5F_ACC_RDWR = 0x0001;
+ int H5F_ACC_TRUNC = 0x0002;
+ int H5F_ACC_EXCL = 0x0004;
+ int H5F_ACC_DEBUG = 0x0000; // HDFFV-1074 was 0x0008;
+ int H5F_ACC_CREAT = 0x0010;
+ int H5F_OBJ_FILE = 0x0001;
+ int H5F_OBJ_DATASET = 0x0002;
+ int H5F_OBJ_GROUP = 0x0004;
+ int H5F_OBJ_DATATYPE = 0x0008;
+ int H5F_OBJ_ATTR = 0x0010;
+ int H5F_OBJ_ALL = H5F_OBJ_FILE | H5F_OBJ_DATASET | H5F_OBJ_GROUP
+ | H5F_OBJ_DATATYPE | H5F_OBJ_ATTR;
+ int H5F_OBJ_LOCAL = 0x0020;
+
+ int definedValues[] = { H5F_ACC_RDONLY, H5F_ACC_RDWR, H5F_ACC_TRUNC,
+ H5F_ACC_EXCL, H5F_ACC_DEBUG, H5F_ACC_CREAT, H5F_OBJ_FILE,
+ H5F_OBJ_DATASET, H5F_OBJ_GROUP, H5F_OBJ_DATATYPE, H5F_OBJ_ATTR,
+ H5F_OBJ_ALL, H5F_OBJ_LOCAL };
+
+ int j2cValues[] = { HDF5Constants.H5F_ACC_RDONLY,
+ HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5F_ACC_EXCL, H5F_ACC_DEBUG,
+ HDF5Constants.H5F_ACC_CREAT, HDF5Constants.H5F_OBJ_FILE,
+ HDF5Constants.H5F_OBJ_DATASET, HDF5Constants.H5F_OBJ_GROUP,
+ HDF5Constants.H5F_OBJ_DATATYPE, HDF5Constants.H5F_OBJ_ATTR,
+ HDF5Constants.H5F_OBJ_ALL, HDF5Constants.H5F_OBJ_LOCAL };
+
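+ // Each locally defined flag must match the value exposed through
+ // HDF5Constants; i.e. the Java constants mirror the C header values.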
+ for (int i = 0; i < definedValues.length; i++) {
+ assertEquals(definedValues[i], j2cValues[i]);
+ }
+
+ assertFalse(H5F_ACC_RDONLY == HDF5Constants.H5F_ACC_RDWR);
+ assertFalse(H5F_OBJ_FILE == HDF5Constants.H5F_OBJ_GROUP);
+ }
+
+ /**
+ * Test method for {@link hdf.hdf5lib.H5#H5error_off()}.
+ */
+ @Test
+ public void testH5error_off() {
+ try {
+ H5.H5error_off();
+ }
+ catch (Throwable err) {
+ fail("H5.H5error_off failed: " + err);
+ }
+ }
+
+ /**
+ * Test method for {@link hdf.hdf5lib.H5#H5open()}.
+ */
+ @Test
+ public void testH5open() {
+ try {
+ H5.H5open();
+ }
+ catch (Throwable err) {
+ fail("H5.H5open failed: " + err);
+ }
+ }
+
+ /**
+ * Test method for {@link hdf.hdf5lib.H5#H5garbage_collect()}.
+ */
+ @Test
+ public void testH5garbage_collect() {
+ try {
+ H5.H5garbage_collect();
+ }
+ catch (Throwable err) {
+ fail("H5.H5garbage_collect failed: " + err);
+ }
+ }
+
+ /**
+ * Test method for
+ * {@link hdf.hdf5lib.H5#H5set_free_list_limits(int, int, int, int, int, int)}
+ * .
+ */
+ @Test
+ public void testH5set_free_list_limits() {
+ int reg_global_lim = 1;
+ int reg_list_lim = 1;
+ int arr_global_lim = 1;
+ int arr_list_lim = 1;
+ int blk_global_lim = 1;
+ int blk_list_lim = 1;
+
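+ // The three pairs cap the global and per-list memory used by the
+ // regular, array and block free lists, as described for
+ // H5set_free_list_limits; a value of -1 would remove that limit.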
+ try {
+ H5.H5set_free_list_limits(reg_global_lim, reg_list_lim,
+ arr_global_lim, arr_list_lim, blk_global_lim, blk_list_lim);
+ }
+ catch (Throwable err) {
+ fail("H5.H5set_free_list_limits failed: " + err);
+ }
+ }
+
+ /**
+ * Test method for {@link hdf.hdf5lib.H5#H5get_libversion(int[])}.
+ */
+ @Test
+ public void testH5get_libversion() {
+ int libversion[] = { 1, 9, 0 };
+
+ try {
+ H5.H5get_libversion(libversion);
+ }
+ catch (Throwable err) {
+ fail("H5.H5get_libversion: " + err);
+ }
+
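+ // Only the major and minor numbers are compared; the release number
+ // is left unchecked, presumably so the test keeps passing across
+ // patch releases of the library.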
+ for (int i = 0; i < 2; i++)
+ assertEquals(H5.LIB_VERSION[i], libversion[i]);
+
+ for (int i = 0; i < 2; i++)
+ assertFalse(libversion[i] == 0);
+ }
+
+ /**
+ * Test method for
+ * {@link hdf.hdf5lib.H5#H5check_version(int, int, int)}.
+ */
+ @Test
+ public void testH5check_version() {
+ int majnum = 1, minnum = 9, relnum = 0;
+
+ try {
+ H5.H5check_version(majnum, minnum, relnum);
+ }
+ catch (Throwable err) {
+ fail("H5.H5check_version failed: " + err);
+ }
+
+ try {
+ H5.H5check_version(-1, 0, 0);
+ }
+ catch (Throwable err) {
+ fail("H5.H5check_version failed: " + err);
+ }
+ }
+
+ @Test
+ public void testIsSerializable() {
+ H5 test = new H5();
+ ByteArrayOutputStream out = new ByteArrayOutputStream();
+ ObjectOutputStream oos;
+ try {
+ oos = new ObjectOutputStream(out);
+ oos.writeObject(test);
+ oos.close();
+ }
+ catch (IOException err) {
+ err.printStackTrace();
+ fail("ObjectOutputStream failed: " + err);
+ }
+ assertTrue(out.toByteArray().length > 0);
+
+ }
+
+ @SuppressWarnings("static-access")
+ @Test
+ public void serializeToDisk()
+ {
+ try {
+ H5 test = new H5();
+
+ FileOutputStream fos = new FileOutputStream("temph5.ser");
+ ObjectOutputStream oos = new ObjectOutputStream(fos);
+ oos.writeObject(test);
+ oos.close();
+ }
+ catch (Exception ex) {
+ fail("Exception thrown during test: " + ex.toString());
+ }
+
+ try {
+ FileInputStream fis = new FileInputStream("temph5.ser");
+ ObjectInputStream ois = new ObjectInputStream(fis);
+ H5 test = (hdf.hdf5lib.H5) ois.readObject();
+ ois.close();
+
+ assertTrue("H5.LIB_VERSION[0]", test.LIB_VERSION[0]==H5.LIB_VERSION[0]);
+ assertTrue("H5.LIB_VERSION[1]", test.LIB_VERSION[1]==H5.LIB_VERSION[1]);
+// assertTrue("H5.LIB_VERSION[2]", test.LIB_VERSION[2]==H5.LIB_VERSION[2]);
+
+ // Clean up the file
+ new File("temph5.ser").delete();
+ }
+ catch (Exception ex) {
+ fail("Exception thrown during test: " + ex.toString());
+ }
+ }
+}
diff --git a/java/test/TestH5A.java b/java/test/TestH5A.java
new file mode 100644
index 0000000..fb7b31a
--- /dev/null
+++ b/java/test/TestH5A.java
@@ -0,0 +1,1119 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.ArrayList;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5A_iterate_cb;
+import hdf.hdf5lib.callbacks.H5A_iterate_t;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.structs.H5A_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5A {
+ @Rule public TestName testname = new TestName();
+ private static final String H5_FILE = "test.h5";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5did = -1;
+ long[] H5dims = { DIM_X, DIM_Y };
+ long type_id = -1;
+ long space_id = -1;
+ long lapl_id = -1;
+
+ private final void _deleteFile(String filename) {
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ private final long _createDataset(long fid, long dsid, String name, long dapl) {
+ long did = -1;
+ try {
+ did = H5.H5Dcreate(fid, name, HDF5Constants.H5T_STD_I32BE, dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5A._createDataset: ", did > 0);
+
+ return did;
+ }
+
+ @Before
+ public void createH5file() throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0", H5.getOpenIDCount() == 0);
+ System.out.print(testname.getMethodName());
+
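+ // Fixtures shared by the attribute tests: a file, a 4x6 dataspace, a
+ // dataset "dset", a NULL dataspace and an attribute-access property
+ // list for the attributes, and a one-member enum datatype.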
+ try {
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ assertTrue("TestH5A.createH5file: H5.H5Fcreate: ", H5fid > 0);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ assertTrue("TestH5A.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+ H5did = _createDataset(H5fid, H5dsid, "dset",
+ HDF5Constants.H5P_DEFAULT);
+ assertTrue("TestH5A.createH5file: _createDataset: ", H5did > 0);
+ space_id = H5.H5Screate(HDF5Constants.H5S_NULL);
+ assertTrue(space_id > 0);
+ lapl_id = H5.H5Pcreate(HDF5Constants.H5P_ATTRIBUTE_ACCESS);
+ assertTrue(lapl_id > 0);
+ type_id = H5.H5Tenum_create(HDF5Constants.H5T_STD_I32LE);
+ assertTrue(type_id > 0);
+ int status = H5.H5Tenum_insert(type_id, "test", 1);
+ assertTrue(status >= 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5A.createH5file: " + err);
+ }
+
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+
+ _deleteFile(H5_FILE);
+
+ if (type_id > 0)
+ try {H5.H5Tclose(type_id);} catch (Exception ex) {}
+ if (space_id > 0)
+ try {H5.H5Sclose(space_id);} catch (Exception ex) {}
+ if (lapl_id > 0)
+ try {H5.H5Pclose(lapl_id);} catch (Exception ex) {}
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Acreate2() {
+ long attr_id = -1;
+ try {
+ attr_id = H5.H5Acreate(H5did, "dset", type_id, space_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5Acreate2", attr_id >= 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Acreate2: " + err);
+ }
+ finally {
+ if (attr_id > 0)
+ try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Acreate2_invalidobject() throws Throwable {
+ H5.H5Acreate(H5dsid, "dset", type_id, space_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Acreate2_nullname() throws Throwable {
+ H5.H5Acreate(H5did, null, type_id, space_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Aopen() {
+ String attr_name = "dset";
+ long attribute_id = -1;
+ long attr_id = -1;
+
+ try {
+ attr_id = H5.H5Acreate(H5did, attr_name, type_id, space_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
+ // Open the existing attribute attr_name (created by H5Acreate above),
+ // attached to the dataset identifier.
+ attribute_id = H5.H5Aopen(H5did, attr_name,
+ HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5Aopen: H5Aopen", attribute_id >= 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Aopen: " + err);
+ }
+ finally {
+ if (attr_id > 0)
+ try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+ if (attribute_id > 0)
+ try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Aopen_invalidname() throws Throwable {
+ H5.H5Aopen(H5did, "attr_name", HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Aopen_by_idx() {
+ long loc_id = H5did;
+ String obj_name = ".";
+ int idx_type = HDF5Constants.H5_INDEX_CRT_ORDER;
+ int order = HDF5Constants.H5_ITER_INC;
+ long n = 0;
+ long attr_id = -1;
+ long attribute_id = -1;
+ long aapl_id = HDF5Constants.H5P_DEFAULT;
+
+ try {
+ attr_id = H5.H5Acreate(H5did, "file", type_id, space_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
+ // Open the existing attribute (created by H5Acreate above) by index,
+ // attached to the dataset identifier.
+ attribute_id = H5.H5Aopen_by_idx(H5did, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC,
+ 0, HDF5Constants.H5P_DEFAULT, lapl_id);
+
+ assertTrue("testH5Aopen_by_idx: H5Aopen_by_idx", attribute_id >= 0);
+
+ // Negative test: an error should be thrown when H5Aopen_by_idx is
+ // called with n=5, since that many attributes have not been created.
+ try {
+ n = 5;
+ H5.H5Aopen_by_idx(loc_id, obj_name, idx_type, order, n,
+ aapl_id, lapl_id);
+ fail("Negative Test Failed:- Error not Thrown when n is invalid.");
+ }
+ catch (AssertionError err) {
+ fail("H5.H5Aopen_by_idx: " + err);
+ }
+ catch (HDF5LibraryException err) {}
+
+ // Negative test: an error should be thrown when H5Aopen_by_idx is
+ // called with an invalid object name (one that has not been created).
+ try {
+ n = 0;
+ obj_name = "file";
+ H5.H5Aopen_by_idx(loc_id, obj_name, idx_type, order, n,
+ aapl_id, lapl_id);
+ fail("Negative Test Failed:- Error not Thrown when attribute name is invalid.");
+ }
+ catch (AssertionError err) {
+ fail("H5.H5Aopen_by_idx: " + err);
+ }
+ catch (HDF5LibraryException err) {}
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Aopen_by_idx: " + err);
+ }
+ finally {
+ if (attr_id > 0)
+ try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+ if (attribute_id > 0)
+ try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Acreate_by_name() {
+ String obj_name = ".";
+ String attr_name = "DATASET";
+ long attribute_id = -1;
+ boolean bool_val = false;
+
+ try {
+ attribute_id = H5.H5Acreate_by_name(H5fid, obj_name, attr_name,
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ assertTrue("testH5Acreate_by_name: H5Acreate_by_name",
+ attribute_id >= 0);
+
+ // Check that the attribute attached to the object specified by
+ // loc_id and obj_name exists. The result should be true.
+ bool_val = H5.H5Aexists_by_name(H5fid, obj_name, attr_name,
+ lapl_id);
+ assertTrue("testH5Acreate_by_name: H5Aexists_by_name",
+ bool_val == true);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Acreate_by_name " + err);
+ }
+ finally {
+ if (attribute_id > 0)
+ try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Arename() throws Throwable, HDF5LibraryException, NullPointerException {
+ long loc_id = H5fid;
+ String old_attr_name = "old";
+ String new_attr_name = "new";
+ long attr_id = -1;
+ int ret_val = -1;
+ boolean bool_val = false;
+
+ try {
+ attr_id = H5.H5Acreate(loc_id, old_attr_name, type_id, space_id, HDF5Constants.H5P_DEFAULT, lapl_id);
+
+ ret_val = H5.H5Arename(loc_id, old_attr_name, new_attr_name);
+
+ // Check the return value. It should be non-negative.
+ assertTrue("testH5Arename: H5Arename", ret_val >= 0);
+
+ // Check that an attribute with the new name now exists on the
+ // object specified by loc_id. The result should be true.
+ bool_val = H5.H5Aexists(loc_id, new_attr_name);
+ assertTrue("testH5Arename: H5Aexists", bool_val == true);
+
+ // Check that no attribute with the old name remains on the object
+ // specified by loc_id. The result should be false.
+ bool_val = H5.H5Aexists(loc_id, old_attr_name);
+ assertEquals(bool_val, false);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Arename " + err);
+ }
+ finally {
+ if (attr_id > 0)
+ try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Arename_by_name() {
+ long loc_id = H5fid;
+ String obj_name = ".";
+ String old_attr_name = "old";
+ String new_attr_name = "new";
+ long attr_id = -1;
+ int ret_val = -1;
+ boolean bool_val = false;
+
+ try {
+ attr_id = H5.H5Acreate_by_name(loc_id, obj_name, old_attr_name,
+ type_id, space_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, lapl_id);
+
+ ret_val = H5.H5Arename_by_name(loc_id, obj_name, old_attr_name,
+ new_attr_name, lapl_id);
+
+ // Check the return value. It should be non-negative.
+ assertTrue("testH5Arename_by_name: H5Arename_by_name", ret_val >= 0);
+
+ // Check that an attribute with the new name now exists on the
+ // object specified by loc_id and obj_name. The result should be true.
+ bool_val = H5.H5Aexists_by_name(loc_id, obj_name, new_attr_name,
+ lapl_id);
+ assertTrue("testH5Arename_by_name: H5Aexists_by_name",
+ bool_val == true);
+
+ // Check that no attribute with the old name remains on the object
+ // specified by loc_id and obj_name. The result should be false.
+ bool_val = H5.H5Aexists_by_name(loc_id, obj_name, old_attr_name,
+ lapl_id);
+ assertEquals(bool_val, false);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Arename_by_name " + err);
+ }
+ finally {
+ if (attr_id > 0)
+ try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Aget_name() {
+ String obj_name = ".";
+ String attr_name = "DATASET1";
+ String ret_name = null;
+ long attribute_id = -1;
+
+ try {
+ attribute_id = H5.H5Acreate_by_name(H5fid, obj_name, attr_name,
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ assertTrue("testH5Aget_name: H5Acreate_by_name ", attribute_id > 0);
+ ret_name = H5.H5Aget_name(attribute_id);
+ assertEquals(ret_name, attr_name);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Aget_name " + err);
+ }
+ finally {
+ if (attribute_id > 0)
+ try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+ }
+ }
+
+
+ @Test
+ public void testH5Aget_name_by_idx() {
+ long loc_id = H5fid;
+ String obj_name = ".";
+ String attr_name = "DATASET1", attr2_name = "DATASET2";
+ String ret_name = null;
+ int idx_type = HDF5Constants.H5_INDEX_NAME;
+ int order = HDF5Constants.H5_ITER_INC;
+ int n = 0;
+ long attr1_id = -1;
+ long attr2_id = -1;
+
+ try {
+ attr1_id = H5.H5Acreate_by_name(loc_id, obj_name, attr_name,
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr2_id = H5.H5Acreate_by_name(loc_id, obj_name, attr2_name,
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+
+ // Get the first attribute name (n=0).
+ ret_name = H5.H5Aget_name_by_idx(loc_id, obj_name, idx_type, order,
+ n, lapl_id);
+ assertFalse("H5Aget_name_by_idx ", ret_name == null);
+ assertEquals(ret_name, attr_name);
+
+ // Get the second attribute name (n=1).
+ ret_name = H5.H5Aget_name_by_idx(loc_id, obj_name, idx_type, order,
+ 1, lapl_id);
+ assertFalse("H5Aget_name_by_idx ", ret_name == null);
+ assertEquals(ret_name, attr2_name);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Aget_name_by_idx " + err);
+ }
+ finally {
+ if (attr1_id > 0)
+ try {H5.H5Aclose(attr1_id);} catch (Exception ex) {}
+ if (attr2_id > 0)
+ try {H5.H5Aclose(attr2_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Aget_storage_size() {
+ long attr_id = -1;
+ long attr_size = -1;
+
+ try {
+ attr_id = H5.H5Acreate(H5did, "dset", type_id, space_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
+ attr_size = H5.H5Aget_storage_size(attr_id);
+ assertTrue("The size of attribute is :", attr_size == 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Aget_storage_size: " + err);
+ }
+ finally {
+ if (attr_id > 0)
+ try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Aget_info() {
+ H5A_info_t attr_info = null;
+ long attribute_id = -1;
+ long attr_id = -1;
+
+ try {
+ attr_id = H5.H5Acreate(H5did, "dset", type_id, space_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ attribute_id = H5.H5Aopen(H5did, "dset", HDF5Constants.H5P_DEFAULT);
+ // Calling H5Aget_info with attribute_id returned from H5Aopen.
+ attr_info = H5.H5Aget_info(attribute_id);
+ assertFalse("H5Aget_info ", attr_info == null);
+ assertTrue("Corder_Valid should be false",
+ attr_info.corder_valid == false);
+ assertTrue("Character set used for attribute name",
+ attr_info.cset == HDF5Constants.H5T_CSET_ASCII);
+ assertTrue("Corder ", attr_info.corder == 0);
+ assertEquals(attr_info.data_size, H5.H5Aget_storage_size(attr_id));
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Aget_info: " + err);
+ }
+ finally {
+ if (attr_id > 0)
+ try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+ if (attribute_id > 0)
+ try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Aget_info1() {
+ H5A_info_t attr_info = null;
+ long attribute_id = -1;
+ long attr_id = -1;
+ int order = HDF5Constants.H5_ITER_INC;
+
+ try {
+ attr_id = H5.H5Acreate(H5did, ".", type_id, space_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ attribute_id = H5.H5Aopen_by_idx(H5did, ".",
+ HDF5Constants.H5_INDEX_CRT_ORDER, order, 0,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ // Calling H5Aget_info with attribute_id returned from
+ // H5Aopen_by_idx.
+ attr_info = H5.H5Aget_info(attribute_id);
+
+ assertFalse("H5Aget_info ", attr_info == null);
+ assertTrue("Corder_Valid should be true",
+ attr_info.corder_valid == true);
+ assertTrue("Character set",
+ attr_info.cset == HDF5Constants.H5T_CSET_ASCII);
+ assertTrue("Corder ", attr_info.corder == 0);
+ assertEquals(attr_info.data_size, H5.H5Aget_storage_size(attr_id));
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Aget_info1: " + err);
+ }
+ finally {
+ if (attr_id > 0)
+ try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+ if (attribute_id > 0)
+ try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Aget_info_by_idx() {
+ long attr_id = -1;
+ long attr2_id = -1;
+ H5A_info_t attr_info = null;
+
+ try {
+ attr_id = H5.H5Acreate(H5did, "dset1", type_id, space_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ attr2_id = H5.H5Acreate(H5did, "dataset2", type_id, space_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
+ //Verify info for 1st attribute, in increasing creation order
+ attr_info = H5.H5Aget_info_by_idx(H5did, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, lapl_id);
+ assertNotNull(attr_info);
+ assertTrue("Corder ", attr_info.corder == 0);//should equal 0 as this is the order of 1st attribute created.
+ assertEquals(attr_info.data_size, H5.H5Aget_storage_size(attr_id));
+
+ //Verify info for 2nd attribute, in increasing creation order
+ attr_info = H5.H5Aget_info_by_idx(H5did, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 1, lapl_id);
+ assertNotNull(attr_info);
+ assertTrue("Corder", attr_info.corder == 1);
+ assertEquals(attr_info.data_size, H5.H5Aget_storage_size(attr2_id));
+
+ //verify info for 2nd attribute, in decreasing creation order
+ attr_info = H5.H5Aget_info_by_idx(H5did, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_DEC, 0, lapl_id);
+ assertNotNull(attr_info);
+ assertTrue("Corder", attr_info.corder == 1); //should equal 1 as this is the order of 2nd attribute created.
+
+ //verify info for 1st attribute, in decreasing creation order
+ attr_info = H5.H5Aget_info_by_idx(H5did, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_DEC, 1, lapl_id);
+ assertNotNull(attr_info);
+ assertTrue("Corder", attr_info.corder == 0); //should equal 0 as this is the order of 1st attribute created.
+
+ //verify info for 1st attribute, in increasing name order
+ attr_info = H5.H5Aget_info_by_idx(H5did, ".", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 1, lapl_id);
+ assertNotNull(attr_info);
+ assertTrue("Corder", attr_info.corder == 0); //should equal 0 as this is the order of 1st attribute created.
+
+ //verify info for 2nd attribute, in decreasing name order
+ attr_info = H5.H5Aget_info_by_idx(H5did, ".", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_DEC, 1, lapl_id);
+ assertNotNull(attr_info);
+ assertTrue("Corder", attr_info.corder == 1); //should equal 1 as this is the order of 2nd attribute created.
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Aget_info_by_idx:" + err);
+ }
+ finally {
+ if (attr_id > 0)
+ try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+ if (attr2_id > 0)
+ try {H5.H5Aclose(attr2_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Aget_info_by_name() {
+ long attr_id = -1;
+ H5A_info_t attr_info = null;
+ String obj_name = ".";
+ String attr_name = "DATASET";
+
+ try {
+ attr_id = H5.H5Acreate_by_name(H5fid, obj_name, attr_name, type_id,
+ space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr_info = H5.H5Aget_info_by_name(H5fid, obj_name, attr_name,
+ lapl_id);
+ assertNotNull(attr_info);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Aget_info_by_name:" + err);
+ }
+ finally {
+ if (attr_id > 0)
+ try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Adelete_by_name() {
+ long attr_id = -1;
+ int ret_val = -1;
+ boolean bool_val = false;
+ boolean exists = false;
+
+ try {
+ attr_id = H5.H5Acreate_by_name(H5fid, ".", "DATASET",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ ret_val = H5.H5Adelete_by_name(H5fid, ".", "DATASET", lapl_id);
+ assertTrue("H5Adelete_by_name", ret_val >= 0);
+
+ // Check if the Attribute still exists.
+ bool_val = H5.H5Aexists_by_name(H5fid, ".", "DATASET",
+ lapl_id);
+ assertFalse("testH5Adelete_by_name: H5Aexists_by_name", bool_val);
+ exists = H5.H5Aexists(H5fid, "DATASET");
+ assertFalse("testH5Adelete_by_name: H5Aexists ",exists);
+
+ // Negative test. Error thrown when we try to delete an attribute
+ // that has already been deleted.
+ try{
+ ret_val = H5.H5Adelete_by_name(H5fid, ".", "DATASET", lapl_id);
+ fail("Negative Test Failed: Error Not thrown.");
+ }
+ catch (AssertionError err) {
+ fail("H5.H5Adelete_by_name: " + err);
+ }
+ catch (HDF5LibraryException err) {}
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Adelete_by_name " + err);
+ }
+ finally {
+ if (attr_id > 0)
+ try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Aexists() {
+ boolean exists = false;
+ long attr_id = -1;
+ long attribute_id = -1;
+
+ try {
+ exists = H5.H5Aexists(H5fid, "None");
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Aexists: " + err);
+ }
+ assertFalse("H5Aexists ", exists);
+
+ try {
+ attr_id = H5.H5Acreate(H5fid, "dset", type_id, space_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ exists = H5.H5Aexists(H5fid, "dset");
+ assertTrue("H5Aexists ", exists);
+
+ attribute_id = H5.H5Acreate_by_name(H5fid, ".", "attribute",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ exists = H5.H5Aexists(H5fid, "attribute");
+ assertTrue("H5Aexists ", exists);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Aexists: " + err);
+ }
+ finally {
+ if (attr_id > 0)
+ try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+ if (attribute_id > 0)
+ try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Adelete_by_idx_order() {
+ boolean exists = false;
+ long attr1_id = -1;
+ long attr2_id = -1;
+ long attr3_id = -1;
+ long attr4_id = -1;
+
+ try {
+ attr1_id = H5.H5Acreate_by_name(H5fid, ".", "attribute1",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr2_id = H5.H5Acreate_by_name(H5fid, ".", "attribute2",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr3_id = H5.H5Acreate_by_name(H5fid, ".", "attribute3",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr4_id = H5.H5Acreate_by_name(H5fid, ".", "attribute4",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+
+ H5.H5Adelete_by_idx(H5fid, ".", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 3, lapl_id);
+ exists = H5.H5Aexists(H5fid, "attribute4");
+ assertFalse("H5Adelete_by_idx: H5Aexists", exists);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Adelete_by_idx: " + err);
+ }
+ finally {
+ if (attr1_id > 0)
+ try {H5.H5Aclose(attr1_id);} catch (Exception ex) {}
+ if (attr2_id > 0)
+ try {H5.H5Aclose(attr2_id);} catch (Exception ex) {}
+ if (attr3_id > 0)
+ try {H5.H5Aclose(attr3_id);} catch (Exception ex) {}
+ if (attr4_id > 0)
+ try {H5.H5Aclose(attr4_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Adelete_by_idx_name1() {
+ boolean exists = false;
+ long attr1_id = -1;
+ long attr2_id = -1;
+ long attr3_id = -1;
+
+ try {
+ attr1_id = H5.H5Acreate_by_name(H5fid, ".", "attribute1",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr2_id = H5.H5Acreate_by_name(H5fid, ".", "attribute2",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr3_id = H5.H5Acreate_by_name(H5fid, ".", "attribute3",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ H5.H5Adelete_by_idx(H5fid, ".", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, lapl_id);
+ exists = H5.H5Aexists(H5fid, "attribute3");
+ assertFalse("H5Adelete_by_idx: H5Aexists", exists);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Adelete_by_idx: " + err);
+ }
+ finally {
+ if (attr1_id > 0)
+ try {H5.H5Aclose(attr1_id);} catch (Exception ex) {}
+ if (attr2_id > 0)
+ try {H5.H5Aclose(attr2_id);} catch (Exception ex) {}
+ if (attr3_id > 0)
+ try {H5.H5Aclose(attr3_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Adelete_by_idx_name2() {
+ boolean exists = false;
+ long attr1_id = -1;
+ long attr2_id = -1;
+ long attr3_id = -1;
+ long attr4_id = -1;
+
+ try {
+ attr1_id = H5.H5Acreate_by_name(H5fid, ".", "attribute1",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr2_id = H5.H5Acreate_by_name(H5fid, ".", "attribute2",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr3_id = H5.H5Acreate_by_name(H5fid, ".", "attribute3",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr4_id = H5.H5Acreate_by_name(H5fid, ".", "attribute4",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+
+ H5.H5Adelete_by_idx(H5fid, ".", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_DEC, 3, lapl_id);
+ exists = H5.H5Aexists(H5fid, "attribute1");
+ assertFalse("H5Adelete_by_idx: H5Aexists", exists);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Adelete_by_idx: " + err);
+ }
+ finally {
+ if (attr1_id > 0)
+ try {H5.H5Aclose(attr1_id);} catch (Exception ex) {}
+ if (attr2_id > 0)
+ try {H5.H5Aclose(attr2_id);} catch (Exception ex) {}
+ if (attr3_id > 0)
+ try {H5.H5Aclose(attr3_id);} catch (Exception ex) {}
+ if (attr4_id > 0)
+ try {H5.H5Aclose(attr4_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Adelete_by_idx_null() throws Throwable {
+ H5.H5Adelete_by_idx(H5fid, null, HDF5Constants.H5_INDEX_CRT_ORDER,
+ HDF5Constants.H5_ITER_INC, 0, lapl_id);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Adelete_by_idx_invalidobject() throws Throwable {
+ H5.H5Adelete_by_idx(H5fid, "invalid", HDF5Constants.H5_INDEX_CRT_ORDER,
+ HDF5Constants.H5_ITER_INC, 0, lapl_id);
+ }
+
+ @Test
+ public void testH5Aopen_by_name() {
+ String obj_name = ".";
+ String attr_name = "DATASET";
+ long attribute_id = -1;
+ long aid = -1;
+
+ try {
+ attribute_id = H5.H5Acreate_by_name(H5fid, obj_name, attr_name,
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+
+ //open Attribute by name
+ if(attribute_id >= 0) {
+ try {
+ aid = H5.H5Aopen_by_name(H5fid, obj_name, attr_name, HDF5Constants.H5P_DEFAULT, lapl_id);
+ assertTrue("testH5Aopen_by_name: ", aid>=0);
+ }
+ catch(Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Aopen_by_name " + err);
+ }
+ }
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Aopen_by_name " + err);
+ }
+ finally {
+ if (aid > 0)
+ try {H5.H5Aclose(aid);} catch (Exception ex) {}
+ if (attribute_id > 0)
+ try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Awrite_readVL() {
+ String attr_name = "VLdata";
+ long attr_id = -1;
+ long atype_id = -1;
+ long aspace_id = -1;
+ String[] str_data = { "Parting", "is such", "sweet", "sorrow." };
+ long[] dims = { str_data.length };
+ long lsize = 1;
+
+ try {
+ atype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ assertTrue("testH5Awrite_readVL.H5Tcopy: ", atype_id >= 0);
+ H5.H5Tset_size(atype_id, HDF5Constants.H5T_VARIABLE);
+ assertTrue("testH5Awrite_readVL.H5Tis_variable_str", H5.H5Tis_variable_str(atype_id));
+ }
+ catch (Exception err) {
+ if (atype_id > 0)
+ try {H5.H5Tclose(atype_id);} catch (Exception ex) {}
+ err.printStackTrace();
+ fail("H5.testH5Awrite_readVL: " + err);
+ }
+
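+ // The attribute uses a 1-D dataspace sized to the number of strings;
+ // H5AwriteVL/H5AreadVL handle the per-element variable-length buffers,
+ // so the strings read back should match str_data exactly.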
+ try {
+ aspace_id = H5.H5Screate_simple(1, dims, null);
+ assertTrue(aspace_id > 0);
+ attr_id = H5.H5Acreate(H5did, attr_name, atype_id, aspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5Awrite_readVL: ", attr_id >= 0);
+
+ H5.H5AwriteVL(attr_id, atype_id, str_data);
+
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+
+ for (int j = 0; j < dims.length; j++) {
+ lsize *= dims[j];
+ }
+ String[] strs = new String[(int) lsize];
+ for (int j = 0; j < lsize; j++) {
+ strs[j] = "";
+ }
+ try {
+ H5.H5AreadVL(attr_id, atype_id, strs);
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ assertTrue("testH5Awrite_readVL:", str_data[0].equals(strs[0]));
+ assertTrue("testH5Awrite_readVL:", str_data[1].equals(strs[1]));
+ assertTrue("testH5Awrite_readVL:", str_data[2].equals(strs[2]));
+ assertTrue("testH5Awrite_readVL:", str_data[3].equals(strs[3]));
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.testH5Awrite_readVL: " + err);
+ }
+ finally {
+ if (attr_id > 0)
+ try {H5.H5Aclose(attr_id);} catch (Exception ex) {}
+ if (aspace_id > 0)
+ try {H5.H5Sclose(aspace_id);} catch (Exception ex) {}
+ if (atype_id > 0)
+ try {H5.H5Tclose(atype_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Aget_create_plist() {
+ String attr_name = "DATASET1";
+ int char_encoding = 0;
+ long plist_id = -1;
+ long attribute_id = -1;
+
+ try {
+ plist_id = H5.H5Pcreate(HDF5Constants.H5P_ATTRIBUTE_CREATE);
+ assertTrue(plist_id > 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Aget_create_plist: H5Pcreate " + err);
+ }
+ try {
+ // Get the character encoding and ensure that it is the default (ASCII)
+ try {
+ char_encoding = H5.H5Pget_char_encoding(plist_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_char_encoding: " + err);
+ }
+ assertTrue("testH5Aget_create_plist: get_char_encoding", char_encoding == HDF5Constants.H5T_CSET_ASCII);
+
+ // Create an attribute for the dataset using the property list
+ try {
+ attribute_id = H5.H5Acreate(H5fid, attr_name, type_id, space_id, plist_id, HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5Aget_create_plist: H5Acreate", attribute_id >= 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Acreate: " + err);
+ }
+
+ /* Close the property list, and get the attribute's property list */
+ H5.H5Pclose(plist_id);
+ plist_id = H5.H5Aget_create_plist(attribute_id);
+ assertTrue(plist_id > 0);
+
+ /* Get the character encoding and ensure that it is the default (ASCII) */
+ try {
+ char_encoding = H5.H5Pget_char_encoding(plist_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_char_encoding: " + err);
+ }
+ assertTrue("testH5Aget_create_plist: get_char_encoding", char_encoding == HDF5Constants.H5T_CSET_ASCII);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Aget_create_plist " + err);
+ }
+ finally {
+ if (plist_id > 0)
+ try {H5.H5Pclose(plist_id);} catch (Exception ex) {}
+ if (attribute_id > 0)
+ try {H5.H5Aclose(attribute_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Aiterate() {
+ long attr1_id = -1;
+ long attr2_id = -1;
+ long attr3_id = -1;
+ long attr4_id = -1;
+
+ class idata {
+ public String attr_name = null;
+ idata(String name) {
+ this.attr_name = name;
+ }
+ }
+ class H5A_iter_data implements H5A_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+ H5A_iterate_t iter_data = new H5A_iter_data();
+ class H5A_iter_callback implements H5A_iterate_cb {
+ public int callback(long group, String name, H5A_info_t info, H5A_iterate_t op_data) {
+ idata id = new idata(name);
+ ((H5A_iter_data)op_data).iterdata.add(id);
+ return 0;
+ }
+ }
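+ // The callback records every attribute name it is handed; iterating
+ // in increasing creation order should therefore visit attribute1
+ // through attribute4 in the order they are created below.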
+ try {
+ attr1_id = H5.H5Acreate_by_name(H5fid, ".", "attribute1",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr2_id = H5.H5Acreate_by_name(H5fid, ".", "attribute2",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr3_id = H5.H5Acreate_by_name(H5fid, ".", "attribute3",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr4_id = H5.H5Acreate_by_name(H5fid, ".", "attribute4",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ H5A_iterate_cb iter_cb = new H5A_iter_callback();
+ try {
+ H5.H5Aiterate(H5fid, HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0L, iter_cb, iter_data);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Aiterate: " + err);
+ }
+ assertFalse("H5Aiterate ",((H5A_iter_data)iter_data).iterdata.isEmpty());
+ assertTrue("H5Aiterate "+((H5A_iter_data)iter_data).iterdata.size(),((H5A_iter_data)iter_data).iterdata.size()==4);
+ assertTrue("H5Aiterate "+(((H5A_iter_data)iter_data).iterdata.get(0)).attr_name,(((H5A_iter_data)iter_data).iterdata.get(0)).attr_name.compareToIgnoreCase("attribute1")==0);
+ assertTrue("H5Aiterate "+(((H5A_iter_data)iter_data).iterdata.get(1)).attr_name,(((H5A_iter_data)iter_data).iterdata.get(1)).attr_name.compareToIgnoreCase("attribute2")==0);
+ assertTrue("H5Aiterate "+((idata)((H5A_iter_data)iter_data).iterdata.get(2)).attr_name,(((H5A_iter_data)iter_data).iterdata.get(2)).attr_name.compareToIgnoreCase("attribute3")==0);
+ assertTrue("H5Aiterate "+((idata)((H5A_iter_data)iter_data).iterdata.get(3)).attr_name,((idata)((H5A_iter_data)iter_data).iterdata.get(3)).attr_name.compareToIgnoreCase("attribute4")==0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Aiterate: " + err);
+ }
+ finally {
+ if (attr1_id > 0)
+ try {H5.H5Aclose(attr1_id);} catch (Exception ex) {}
+ if (attr2_id > 0)
+ try {H5.H5Aclose(attr2_id);} catch (Exception ex) {}
+ if (attr3_id > 0)
+ try {H5.H5Aclose(attr3_id);} catch (Exception ex) {}
+ if (attr4_id > 0)
+ try {H5.H5Aclose(attr4_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Aiterate_by_name() {
+ long attr1_id = -1;
+ long attr2_id = -1;
+ long attr3_id = -1;
+ long attr4_id = -1;
+
+ class idata {
+ public String attr_name = null;
+ idata(String name) {
+ this.attr_name = name;
+ }
+ }
+ class H5A_iter_data implements H5A_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+ H5A_iterate_t iter_data = new H5A_iter_data();
+ class H5A_iter_callback implements H5A_iterate_cb {
+ public int callback(long group, String name, H5A_info_t info, H5A_iterate_t op_data) {
+ idata id = new idata(name);
+ ((H5A_iter_data)op_data).iterdata.add(id);
+ return 0;
+ }
+ }
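+ // The attributes are deliberately created in reverse name order;
+ // iterating with H5_INDEX_NAME / H5_ITER_INC should still visit them
+ // alphabetically, which the check on index 1 ("attribute2") verifies.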
+ try {
+ attr1_id = H5.H5Acreate_by_name(H5fid, ".", "attribute4",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr2_id = H5.H5Acreate_by_name(H5fid, ".", "attribute3",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr3_id = H5.H5Acreate_by_name(H5fid, ".", "attribute2",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ attr4_id = H5.H5Acreate_by_name(H5fid, ".", "attribute1",
+ type_id, space_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, lapl_id);
+ H5A_iterate_cb iter_cb = new H5A_iter_callback();
+ try {
+ H5.H5Aiterate_by_name(H5fid, ".", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0L, iter_cb, iter_data, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Aiterate_by_name: " + err);
+ }
+ assertFalse("H5Aiterate_by_name ",((H5A_iter_data)iter_data).iterdata.isEmpty());
+ assertTrue("H5Aiterate_by_name "+((H5A_iter_data)iter_data).iterdata.size(),((H5A_iter_data)iter_data).iterdata.size()==4);
+ assertTrue("H5Aiterate_by_name "+((idata)((H5A_iter_data)iter_data).iterdata.get(1)).attr_name,((idata)((H5A_iter_data)iter_data).iterdata.get(1)).attr_name.compareToIgnoreCase("attribute2")==0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Aiterate: " + err);
+ }
+ finally {
+ if (attr1_id > 0)
+ try {H5.H5Aclose(attr1_id);} catch (Exception ex) {}
+ if (attr2_id > 0)
+ try {H5.H5Aclose(attr2_id);} catch (Exception ex) {}
+ if (attr3_id > 0)
+ try {H5.H5Aclose(attr3_id);} catch (Exception ex) {}
+ if (attr4_id > 0)
+ try {H5.H5Aclose(attr4_id);} catch (Exception ex) {}
+ }
+ }
+
+}
+
+
+
diff --git a/java/test/TestH5D.java b/java/test/TestH5D.java
new file mode 100644
index 0000000..fa051db
--- /dev/null
+++ b/java/test/TestH5D.java
@@ -0,0 +1,944 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.HDFNativeData;
+import hdf.hdf5lib.callbacks.H5D_iterate_cb;
+import hdf.hdf5lib.callbacks.H5D_iterate_t;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5D {
+ @Rule public TestName testname = new TestName();
+ private static final String H5_FILE = "test.h5";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
+ private static final int RANK = 2;
+ long H5fid = -1;
+ long H5faplid = -1;
+ long H5dsid = -1;
+ long H5dtid = -1;
+ long H5did = -1;
+ long H5did0 = -1;
+ long H5dcpl_id = -1;
+ long[] H5dims = { DIM_X, DIM_Y };
+
+ // Values for the status of space allocation
+ enum H5D_space_status {
+ H5D_SPACE_STATUS_ERROR(-1),
+ H5D_SPACE_STATUS_NOT_ALLOCATED(0),
+ H5D_SPACE_STATUS_PART_ALLOCATED(1),
+ H5D_SPACE_STATUS_ALLOCATED(2);
+
+ private int code;
+
+ H5D_space_status(int space_status) {
+ this.code = space_status;
+ }
+
+ public int getCode() {
+ return this.code;
+ }
+ }
+
+ private final void _deleteFile(String filename) {
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {file.delete();}
+ catch (SecurityException e) {}
+ }
+ }
+
+ private final void _createPDataset(long fid, long dsid, String name, long dcpl_val) {
+
+ try {
+ H5dcpl_id = H5.H5Pcreate(dcpl_val);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("H5.H5Pcreate: " + err);
+ }
+ assertTrue("testH5D._createPDataset: H5.H5Pcreate: ", H5dcpl_id >= 0);
+
+ // Set the allocation time to "early". This way we can be sure
+ // that reading from the dataset immediately after creation will
+ // return the fill value.
+ try {
+ H5.H5Pset_alloc_time(H5dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ H5did0 = H5.H5Dcreate(fid, name,
+ HDF5Constants.H5T_STD_I32BE, dsid,
+ HDF5Constants.H5P_DEFAULT, H5dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5D._createPDataset.H5Dcreate: ", H5did0 >= 0);
+ }
+
+ private final void _createChunkDataset(long fid, long dsid, String name, long dapl) {
+
+ try {
+ H5dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("H5.H5Pcreate: " + err);
+ }
+ assertTrue("testH5D._createChunkDataset: H5.H5Pcreate: ", H5dcpl_id >= 0);
+
+ // Set the chunking.
+ long[] chunk_dim = {4, 4};
+
+ try {
+ H5.H5Pset_chunk(H5dcpl_id, RANK, chunk_dim);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ H5did = H5.H5Dcreate(fid, name,
+ HDF5Constants.H5T_STD_I32BE, dsid,
+ HDF5Constants.H5P_DEFAULT, H5dcpl_id, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5D._createChunkDataset.H5Dcreate: ", H5did >= 0);
+ }
+
+ private final void _createDataset(long fid, long dsid, String name, long dapl) {
+ try {
+ H5did = H5.H5Dcreate(fid, name,
+ HDF5Constants.H5T_STD_I32BE, dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5D._createDataset.H5Dcreate: ", H5did >= 0);
+ }
+
+ private final void _createVLDataset(long fid, long dsid, String name, long dapl) {
+ try {
+ H5dtid = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Tcopy: " + err);
+ }
+ assertTrue("TestH5D._createVLDataset.H5Tcopy: ", H5dtid >= 0);
+ try {
+ H5.H5Tset_size(H5dtid, HDF5Constants.H5T_VARIABLE);
+ assertTrue("TestH5D._createVLDataset.H5Tis_variable_str", H5.H5Tis_variable_str(H5dtid));
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Tset_size: " + err);
+ }
+ try {
+ H5did = H5.H5Dcreate(fid, name, H5dtid, dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5D._createVLDataset.H5Dcreate: ", H5did >= 0);
+ }
+
+ private final void _closeH5file() throws HDF5LibraryException {
+ if (H5dcpl_id >= 0)
+ try {H5.H5Pclose(H5dcpl_id);} catch (Exception ex) {}
+ if (H5did0 >= 0)
+ try {H5.H5Dclose(H5did0);} catch (Exception ex) {}
+ if (H5did >= 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5dtid > 0)
+ try {H5.H5Tclose(H5dtid);} catch (Exception ex) {}
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ }
+
+ private final void _openH5file(String name, long dapl) {
+ try {
+ H5fid = H5.H5Fopen(H5_FILE,
+ HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5D._openH5file: " + err);
+ }
+ assertTrue("TestH5D._openH5file: H5.H5Fopen: ",H5fid >= 0);
+ try {
+ H5did = H5.H5Dopen(H5fid, name, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5D._openH5file: " + err);
+ }
+ assertTrue("TestH5D._openH5file: H5.H5Dopen: ",H5did >= 0);
+ try {
+ H5dsid = H5.H5Dget_space(H5did);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5D._openH5file: " + err);
+ }
+ assertTrue("TestH5D._openH5file: H5.H5Screate_simple: ",H5dsid > 0);
+ }
+
+ @Before
+ public void createH5file()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ try {
+ H5faplid = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, H5faplid);
+ H5dsid = H5.H5Screate_simple(RANK, H5dims, null);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5D.createH5file: " + err);
+ }
+ assertTrue("TestH5D.createH5file: H5.H5Fcreate: ",H5fid >= 0);
+ assertTrue("TestH5D.createH5file: H5.H5Screate_simple: ",H5dsid >= 0);
+
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5dcpl_id >= 0)
+ try {H5.H5Pclose(H5dcpl_id);} catch (Exception ex) {}
+ if (H5did0 >= 0)
+ try {H5.H5Dclose(H5did0);} catch (Exception ex) {}
+ if (H5did >= 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5dtid > 0)
+ try {H5.H5Tclose(H5dtid);} catch (Exception ex) {}
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5faplid >= 0)
+ try {H5.H5Pclose(H5faplid);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+
+ _deleteFile(H5_FILE);
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Dcreate() {
+ long dataset_id = -1;
+ try {
+ dataset_id = H5.H5Dcreate(H5fid, "dset",
+ HDF5Constants.H5T_STD_I32BE, H5dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dcreate: " + err);
+ }
+ assertTrue(dataset_id >= 0);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ }
+ }
+
+ @Test
+ public void testH5Dcreate_anon() {
+ long dataset_id = -1;
+ try {
+ dataset_id = H5.H5Dcreate_anon(H5fid, HDF5Constants.H5T_STD_I32BE,
+ H5dsid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dcreate_anon: " + err);
+ }
+ assertTrue(dataset_id >= 0);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ }
+ }
+
+ @Test
+ public void testH5Dopen() {
+ long dataset_id = -1;
+ _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+
+ try {
+ H5.H5Dclose(H5did);
+ H5did = -1;
+ dataset_id = H5.H5Dopen(H5fid, "dset", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dopen: " + err);
+ }
+ assertTrue("testH5Dopen: ", dataset_id >= 0);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ }
+ }
+
+ @Test
+ public void testH5Dget_storage_size_empty() {
+ long storage_size = 0;
+ _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+
+ try {
+ storage_size = H5.H5Dget_storage_size(H5did);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dget_storage_size: " + err);
+ }
+ assertTrue("testH5Dget_storage_size: ", storage_size == 0);
+ }
+
+ @Test
+ public void testH5Dget_storage_size() {
+ long storage_size = 0;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+ int FILLVAL = 99;
+ _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = FILLVAL;
+
+ try {
+ if (H5did >= 0)
+ H5.H5Dwrite(H5did, HDF5Constants.H5T_NATIVE_INT,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data[0]);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
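+ // Each of the DIM_X * DIM_Y = 24 elements is stored as a 4-byte
+ // H5T_STD_I32BE value, so 96 bytes of storage are expected.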
+ try {
+ storage_size = H5.H5Dget_storage_size(H5did);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dget_storage_size: " + err);
+ }
+ assertTrue("testH5Dget_storage_size: "+storage_size, storage_size == DIM_X*DIM_Y*4);
+ }
+
+ @Test
+ public void testH5Dget_access_plist() {
+ long dapl_id = -1;
+ long test_dapl_id = -1;
+ int[] mdc_nelmts1 = {0};
+ int[] mdc_nelmts2 = {0};
+ long[] rdcc_nelmts1 = {0};
+ long[] rdcc_nelmts2 = {0};
+ long[] rdcc_nbytes1 = {0};
+ long[] rdcc_nbytes2 = {0};
+ double[] rdcc_w01 = {0};
+ double[] rdcc_w02 = {0};
+
+ try {
+ test_dapl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dget_access_plist: H5.H5Pcreate: " + err);
+ }
+ assertTrue("testH5Dget_access_plist: test_dapl_id: ", test_dapl_id >= 0);
+
+ try {
+ H5.H5Pget_cache(H5faplid, mdc_nelmts1, rdcc_nelmts1, rdcc_nbytes1, rdcc_w01);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dget_access_plist: H5.H5Pget_cache: " + err);
+ }
+
+ _createChunkDataset(H5fid, H5dsid, "dset", test_dapl_id);
+
+ try {
+ dapl_id = H5.H5Dget_access_plist(H5did);
+ assertTrue("testH5Dget_access_plist: dapl_id: ", dapl_id >= 0);
+ H5.H5Pget_chunk_cache(dapl_id, rdcc_nelmts2, rdcc_nbytes2, rdcc_w02);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dget_access_plist: H5.H5Dget_access_plist: " + err);
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dapl_id >= 0)
+ H5.H5Pclose(dapl_id);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ }
+ try {
+ if (test_dapl_id >= 0)
+ H5.H5Pclose(test_dapl_id);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ }
+ assertTrue("testH5Dget_access_plist: ", rdcc_nelmts2==rdcc_nelmts2 && rdcc_nbytes2==rdcc_nbytes2);
+ }
+
+ @Test
+ public void testH5Dget_space_status() {
+ int[][] write_dset_data = new int[DIM_X][DIM_Y];
+ int space_status = -1;
+ int space_status0 = -1;
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ write_dset_data[indx][jndx] = indx * jndx - jndx;
+
+ _createPDataset(H5fid, H5dsid, "dset0", HDF5Constants.H5P_DATASET_CREATE);
+ _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
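+ // dset0 was created with H5D_ALLOC_TIME_EARLY, so its space should be
+ // allocated immediately; dset uses the default (late) allocation time
+ // and should not report H5D_SPACE_STATUS_ALLOCATED until it is written.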
+
+ // Retrieve and print space status and storage size for dset0.
+ try {
+ space_status0 = H5.H5Dget_space_status(H5did0);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ assertTrue("testH5Dget_space_status0 - H5.H5Dget_space_status: ", space_status0 == H5D_space_status.H5D_SPACE_STATUS_ALLOCATED.getCode());
+
+ // Retrieve and check the space status for dset before writing.
+ try {
+ space_status = H5.H5Dget_space_status(H5did);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ assertFalse("testH5Dget_space_status - H5.H5Dget_space_status: ", space_status == H5D_space_status.H5D_SPACE_STATUS_ALLOCATED.getCode());
+
+ // Write the data to the dataset.
+ try {
+ H5.H5Dwrite(H5did, HDF5Constants.H5T_NATIVE_INT,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, write_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and check the space status for dset after the write.
+ try {
+ space_status = H5.H5Dget_space_status(H5did);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ assertTrue("testH5Dget_space_status - H5.H5Dget_space_status: ", space_status == H5D_space_status.H5D_SPACE_STATUS_ALLOCATED.getCode());
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Dget_space_closed() throws Throwable {
+ long dataset_id = -1;
+ try {
+ dataset_id = H5.H5Dcreate(H5fid, "dset",
+ HDF5Constants.H5T_STD_I32BE, H5dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5D.testH5Dget_space_closed: ", dataset_id >= 0);
+ H5.H5Dclose(dataset_id);
+
+ H5.H5Dget_space(dataset_id);
+ }
+
+ @Test
+ public void testH5Dget_space() {
+ long dataspace_id = -1;
+ _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+
+ try {
+ dataspace_id = H5.H5Dget_space(H5did);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("H5.H5Dget_space: " + err);
+ }
+ assertTrue("TestH5D.testH5Dget_space: ", dataspace_id >= 0);
+
+ // End access to the dataspace and release resources used by it.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ }
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Dget_type_closed() throws Throwable {
+ long dataset_id = -1;
+ try {
+ dataset_id = H5.H5Dcreate(H5fid, "dset",
+ HDF5Constants.H5T_STD_I32BE, H5dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5D.testH5Dget_type_closed: ", dataset_id >= 0);
+ H5.H5Dclose(dataset_id);
+
+ H5.H5Dget_type(dataset_id);
+ }
+
+ @Test
+ public void testH5Dget_type() {
+ long datatype_id = -1;
+ _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+
+ try {
+ datatype_id = H5.H5Dget_type(H5did);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("H5.H5Dget_type: " + err);
+ }
+ assertTrue("TestH5D.testH5Dget_type: ", datatype_id >= 0);
+
+ // End access to the datatype and release resources used by it.
+ try {
+ if (datatype_id >= 0)
+ H5.H5Tclose(datatype_id);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ }
+ }
+
+ @Test
+ public void testH5Dget_offset() {
+ int[][] write_dset_data = new int[DIM_X][DIM_Y];
+ long dset_address = 0;
+ _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+
+ try {
+ // Test dataset address. Should be undefined.
+ dset_address = H5.H5Dget_offset(H5did);
+ }
+ catch (HDF5LibraryException hdfex) {
+ // Expected: the address is undefined before any data has been written.
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("H5.H5Dget_offset: " + err);
+ }
+ // Write the data to the dataset.
+ try {
+ H5.H5Dwrite(H5did, HDF5Constants.H5T_NATIVE_INT,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, write_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
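+ // After the write the dataset has an allocated location in the file, so H5Dget_offset should return a valid (non-negative) address.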
+ try {
+ // Test dataset address.
+ dset_address = H5.H5Dget_offset(H5did);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("H5.H5Dget_offset: " + err);
+ }
+
+ assertTrue("TestH5D.testH5Dget_offset: ", dset_address >= 0);
+ }
+
+ @Test
+ public void testH5Dfill_null() {
+ int[] buf_data = new int[DIM_X*DIM_Y];
+
+ // Initialize memory buffer
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ buf_data[(indx * DIM_Y) + jndx] = indx * jndx - jndx;
+ }
+ byte[] buf_array = HDFNativeData.intToByte(0, DIM_X*DIM_Y, buf_data);
+
+ // Fill selection in memory
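+ // A null fill value causes H5Dfill to write zeros into the selected elements.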
+ try {
+ H5.H5Dfill(null, HDF5Constants.H5T_NATIVE_UINT, buf_array, HDF5Constants.H5T_NATIVE_UINT, H5dsid);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("H5.H5Dfill: " + err);
+ }
+ buf_data = HDFNativeData.byteToInt(buf_array);
+
+ // Verify memory buffer the hard way
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ assertTrue("H5.H5Dfill: [" + indx+","+jndx + "] ", buf_data[(indx * DIM_Y) + jndx] == 0);
+ }
+
+ @Test
+ public void testH5Dfill() {
+ int[] buf_data = new int[DIM_X*DIM_Y];
+ byte[] fill_value = HDFNativeData.intToByte(254);
+
+ // Initialize memory buffer
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ buf_data[(indx * DIM_Y) + jndx] = indx * jndx - jndx;
+ }
+ byte[] buf_array = HDFNativeData.intToByte(0, DIM_X*DIM_Y, buf_data);
+
+ // Fill selection in memory
+ try {
+ H5.H5Dfill(fill_value, HDF5Constants.H5T_NATIVE_UINT, buf_array, HDF5Constants.H5T_NATIVE_UINT, H5dsid);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("H5.H5Dfill: " + err);
+ }
+ buf_data = HDFNativeData.byteToInt(buf_array);
+
+ // Verify memory buffer the hard way
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ assertTrue("H5.H5Dfill: [" + indx+","+jndx + "] ", buf_data[(indx * DIM_Y) + jndx] == 254);
+ }
+
+ @Test
+ public void testH5Diterate() {
+ final int SPACE_RANK = 2;
+ final int SPACE_FILL = 254;
+
+ class H5D_iter_data implements H5D_iterate_t {
+ public int fill_value; /* The fill value to check */
+ public long fill_curr_coord; /* Current coordinate to examine */
+ public long[] fill_coords; /* Pointer to selection's coordinates */
+ }
+
+ H5D_iterate_t iter_data = new H5D_iter_data();
+
+ class H5D_iter_callback implements H5D_iterate_cb {
+ public int callback(byte[] elem_buf, long elem_id, int ndim, long[] point, H5D_iterate_t op_data) {
+ //Check value in current buffer location
+ int element = HDFNativeData.byteToInt(elem_buf, 0);
+ if(element != ((H5D_iter_data)op_data).fill_value)
+ return -1;
+ //Check number of dimensions
+ if(ndim != SPACE_RANK)
+ return(-1);
+ //Check Coordinates
+ long[] fill_coords = new long[2];
+ fill_coords[0] = ((H5D_iter_data)op_data).fill_coords[(int) (2 * ((H5D_iter_data)op_data).fill_curr_coord)];
+ fill_coords[1] = ((H5D_iter_data)op_data).fill_coords[(int) (2 * ((H5D_iter_data)op_data).fill_curr_coord) + 1];
+ ((H5D_iter_data)op_data).fill_curr_coord++;
+ if(fill_coords[0] != point[0])
+ return(-1);
+ if(fill_coords[1] != point[1])
+ return(-1);
+
+ return(0);
+ }
+ }
+
+ int[] buf_data = new int[DIM_X*DIM_Y];
+ byte[] fill_value = HDFNativeData.intToByte(SPACE_FILL);
+
+ // Initialize memory buffer
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ buf_data[(indx * DIM_Y) + jndx] = indx * jndx - jndx;
+ }
+ byte[] buf_array = HDFNativeData.intToByte(0, DIM_X*DIM_Y, buf_data);
+
+ // Fill selection in memory
+ try {
+ H5.H5Dfill(fill_value, HDF5Constants.H5T_NATIVE_UINT, buf_array, HDF5Constants.H5T_NATIVE_UINT, H5dsid);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("H5.H5Diterate: " + err);
+ }
+
+ // Initialize the iterator structure
+ ((H5D_iter_data)iter_data).fill_value = SPACE_FILL;
+ ((H5D_iter_data)iter_data).fill_curr_coord = 0;
+ // Set the coordinates of the selection
+ ((H5D_iter_data)iter_data).fill_coords = new long[DIM_X*DIM_Y*SPACE_RANK]; /* Coordinates of selection */
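+ // Coordinates are stored as interleaved (row, column) pairs in the order H5Diterate visits the elements.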
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ ((H5D_iter_data)iter_data).fill_coords[2*(indx * DIM_Y + jndx)] = indx;
+ ((H5D_iter_data)iter_data).fill_coords[2*(indx * DIM_Y + jndx) + 1] = jndx;
+ } /* end for */
+
+ // Iterate through selection, verifying correct data
+ H5D_iterate_cb iter_cb = new H5D_iter_callback();
+ int op_status = -1;
+ try {
+ op_status = H5.H5Diterate(buf_array, HDF5Constants.H5T_NATIVE_UINT, H5dsid, iter_cb, iter_data);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Diterate: " + err);
+ }
+ assertTrue("H5Diterate ", op_status == 0);
+ }
+
+ @Test
+ public void testH5Diterate_write() {
+ final int SPACE_RANK = 2;
+ final int SPACE_FILL = 254;
+
+ class H5D_iter_data implements H5D_iterate_t {
+ public int fill_value; /* The fill value to check */
+ public long fill_curr_coord; /* Current coordinate to examine */
+ public long[] fill_coords; /* Pointer to selection's coordinates */
+ }
+
+ H5D_iterate_t iter_data = new H5D_iter_data();
+
+ class H5D_iter_callback implements H5D_iterate_cb {
+ public int callback(byte[] elem_buf, long elem_id, int ndim, long[] point, H5D_iterate_t op_data) {
+ //Check value in current buffer location
+ int element = HDFNativeData.byteToInt(elem_buf, 0);
+ if(element != ((H5D_iter_data)op_data).fill_value)
+ return -1;
+ //Check number of dimensions
+ if(ndim != SPACE_RANK)
+ return(-1);
+ //Check Coordinates
+ long[] fill_coords = new long[2];
+ fill_coords[0] = ((H5D_iter_data)op_data).fill_coords[(int) (2 * ((H5D_iter_data)op_data).fill_curr_coord)];
+ fill_coords[1] = ((H5D_iter_data)op_data).fill_coords[(int) (2 * ((H5D_iter_data)op_data).fill_curr_coord) + 1];
+ ((H5D_iter_data)op_data).fill_curr_coord++;
+ if(fill_coords[0] != point[0])
+ return(-1);
+ if(fill_coords[1] != point[1])
+ return(-1);
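+ // Modify the element in place: the buffer was filled with 254, so 254 - 128 = 126 is what the final verification expects.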
+ element -= 128;
+ byte[] new_elembuf = HDFNativeData.intToByte(element);
+ elem_buf[0] = new_elembuf[0];
+ elem_buf[1] = new_elembuf[1];
+ elem_buf[2] = new_elembuf[2];
+ elem_buf[3] = new_elembuf[3];
+ return(0);
+ }
+ }
+
+ int[] buf_data = new int[DIM_X*DIM_Y];
+ byte[] fill_value = HDFNativeData.intToByte(SPACE_FILL);
+
+ // Initialize memory buffer
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ buf_data[(indx * DIM_Y) + jndx] = indx * jndx - jndx;
+ }
+ byte[] buf_array = HDFNativeData.intToByte(0, DIM_X*DIM_Y, buf_data);
+
+ // Fill selection in memory
+ try {
+ H5.H5Dfill(fill_value, HDF5Constants.H5T_NATIVE_UINT, buf_array, HDF5Constants.H5T_NATIVE_UINT, H5dsid);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("H5.H5Diterate: " + err);
+ }
+
+ // Initialize the iterator structure
+ ((H5D_iter_data)iter_data).fill_value = SPACE_FILL;
+ ((H5D_iter_data)iter_data).fill_curr_coord = 0;
+ // Set the coordinates of the selection
+ ((H5D_iter_data)iter_data).fill_coords = new long[DIM_X*DIM_Y*SPACE_RANK]; /* Coordinates of selection */
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ ((H5D_iter_data)iter_data).fill_coords[2*(indx * DIM_Y + jndx)] = indx;
+ ((H5D_iter_data)iter_data).fill_coords[2*(indx * DIM_Y + jndx) + 1] = jndx;
+ } /* end for */
+
+ // Iterate through selection, verifying correct data
+ H5D_iterate_cb iter_cb = new H5D_iter_callback();
+ int op_status = -1;
+ try {
+ op_status = H5.H5Diterate(buf_array, HDF5Constants.H5T_NATIVE_UINT, H5dsid, iter_cb, iter_data);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Diterate: " + err);
+ }
+ assertTrue("H5Diterate ", op_status == 0);
+
+ buf_data = HDFNativeData.byteToInt(buf_array);
+
+ // Verify memory buffer the hard way
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ assertTrue("H5.H5Diterate: [" + indx+","+jndx + "] "+buf_data[(indx * DIM_Y) + jndx], buf_data[(indx * DIM_Y) + jndx] == 126);
+ }
+
+ @Test
+ public void testH5Dvlen_get_buf_size() {
+ String[] str_data = { "Parting", "is such", "sweet", "sorrow.",
+ "Testing", "one", "two", "three.",
+ "Dog,", "man's", "best", "friend.",
+ "Diamonds", "are", "a", "girls!",
+ "S A", "T U R", "D A Y", "night",
+ "That's", "all", "folks", "!!!" };
+ long vl_size = -1; /* Number of bytes used */
+ long str_data_bytes = 0;
+ for (int idx = 0; idx < str_data.length; idx++)
+ str_data_bytes += str_data[idx].length() + 1; //Account for terminating null
+
+ _createVLDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+
+ try {
+ if ((H5did >= 0) && (H5dtid >= 0))
+ H5.H5Dwrite_VLStrings(H5did, H5dtid,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, str_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ vl_size = H5.H5Dvlen_get_buf_size(H5did, H5dtid, H5dsid);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ assertTrue("H5Dvlen_get_buf_size " + vl_size + " == " + str_data_bytes, vl_size == str_data_bytes);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Dvlen_read_invalid_buffer() throws Throwable {
+ String[] str_data = { "Parting", "is such", "sweet", "sorrow.",
+ "Testing", "one", "two", "three.",
+ "Dog,", "man's", "best", "friend.",
+ "Diamonds", "are", "a", "girls!",
+ "S A", "T U R", "D A Y", "night",
+ "That's", "all", "folks", "!!!" };
+ byte[] read_data = new byte[512];
+
+ _createVLDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+
+ try {
+ H5.H5Dwrite_VLStrings(H5did, H5dtid,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, str_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ H5.H5Dread(H5did, H5dtid,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, read_data);
+ }
+
+ @Test
+ public void testH5Dvlen_write_read() {
+ String[] str_wdata = { "Parting", "is such", "sweet", "sorrow.",
+ "Testing", "one", "two", "three.",
+ "Dog,", "man's", "best", "friend.",
+ "Diamonds", "are", "a", "girls!",
+ "S A", "T U R", "D A Y", "night",
+ "That's", "all", "folks", "!!!" };
+ String[] str_rdata = new String[DIM_X * DIM_Y];
+
+ _createVLDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+
+ try {
+ if ((H5did >= 0) && (H5dtid >= 0))
+ H5.H5Dwrite_VLStrings(H5did, H5dtid,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, str_wdata);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if ((H5did >= 0) && (H5dtid >= 0))
+ H5.H5Dread_VLStrings(H5did, H5dtid,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, str_rdata);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ for (int v = 0; v < DIM_X * DIM_Y; v++)
+ assertTrue("testH5Dvlen_write_read " + str_wdata[v] + " == " + str_rdata[v], str_wdata[v] == str_wdata[v]);
+ }
+
+}
diff --git a/java/test/TestH5Dparams.java b/java/test/TestH5Dparams.java
new file mode 100644
index 0000000..f056027
--- /dev/null
+++ b/java/test/TestH5Dparams.java
@@ -0,0 +1,134 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertTrue;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Dparams {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void checkOpenIDs() {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ }
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test//(expected = HDF5LibraryException.class)
+ public void testH5Dclose_invalid() throws Throwable {
+ long did = H5.H5Dclose(-1);
+ assertTrue(did == 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Dcreate_null() throws Throwable {
+ H5.H5Dcreate(-1, null, 0, 0, 0, 0, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Dcreate_invalid() throws Throwable {
+ H5.H5Dcreate(-1, "Bogus", -1, -1, -1, -1, -1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Dcreate_anon_invalid() throws Throwable {
+ H5.H5Dcreate_anon(-1, -1, -1, -1, -1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Dget_access_plist_invalid() throws Throwable {
+ H5.H5Dget_access_plist(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Dget_create_plist_invalid() throws Throwable {
+ H5.H5Dget_create_plist(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Dget_offset_invalid() throws Throwable {
+ H5.H5Dget_offset(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Dget_space_invalid() throws Throwable {
+ H5.H5Dget_space(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Dget_type_invalid() throws Throwable {
+ H5.H5Dget_type(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Dget_space_status_invalid() throws Throwable {
+ H5.H5Dget_space_status(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Dset_extent_status_invalid() throws Throwable {
+ long[] size = new long[2];
+ H5.H5Dset_extent(-1, size);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Dset_extent_status_null() throws Throwable {
+ H5.H5Dset_extent(-1, null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Dopen_null() throws Throwable {
+ H5.H5Dopen(-1, null, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Dopen_invalid() throws Throwable {
+ H5.H5Dopen(-1, "Bogus", 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Dvlen_get_buf_size_invalid() throws Throwable {
+ H5.H5Dvlen_get_buf_size(-1, -1, -1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Dvlen_reclaim_invalid() throws Throwable {
+ byte[] buf = new byte[2];
+ H5.H5Dvlen_reclaim(-1, -1, -1, buf);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Dvlen_reclaim_null() throws Throwable {
+ H5.H5Dvlen_reclaim(-1, -1, -1, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Dget_storage_size_invalid() throws Throwable {
+ H5.H5Dget_storage_size(-1);
+ }
+
+}
diff --git a/java/test/TestH5Dplist.java b/java/test/TestH5Dplist.java
new file mode 100644
index 0000000..eb1669f
--- /dev/null
+++ b/java/test/TestH5Dplist.java
@@ -0,0 +1,216 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Dplist {
+ @Rule public TestName testname = new TestName();
+ private static final String H5_FILE = "test.h5";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int NDIMS = 2;
+ private static final int FILLVAL = 99;
+ private static final int RANK = 2;
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5did = -1;
+ long H5dcpl_id = -1;
+ long[] H5dims = { DIM_X, DIM_Y };
+ long[] H5extdims = { EDIM_X, EDIM_Y };
+ long[] H5chunk_dims = { CHUNK_X, CHUNK_Y };
+ long[] H5maxdims = { HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED };
+
+ private final void _deleteFile(String filename) {
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ private final void _createPDataset(long fid, long dsid, String name, long dcpl_val) {
+ try {
+ H5dcpl_id = H5.H5Pcreate(dcpl_val);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("H5.H5Pcreate: " + err);
+ }
+ assertTrue("TestH5Dplist._createPDataset: ", H5dcpl_id > 0);
+
+ // Set the chunk size.
+ try {
+ H5.H5Pset_chunk(H5dcpl_id, NDIMS, H5chunk_dims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the fill value for the dataset
+ try {
+ int[] fill_value = { FILLVAL };
+ H5.H5Pset_fill_value(H5dcpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the allocation time to "early". This way we can be sure
+ // that reading from the dataset immediately after creation will
+ // return the fill value.
+ try {
+ H5.H5Pset_alloc_time(H5dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ _createDataset(H5fid, H5dsid, "dset", H5dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+
+ private final void _createDataset(long fid, long dsid, String name, long dcpl, long dapl) {
+ try {
+ H5did = H5.H5Dcreate(fid, name,
+ HDF5Constants.H5T_STD_I32BE, dsid,
+ HDF5Constants.H5P_DEFAULT, dcpl, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5Dplist._createDataset: ",H5did > 0);
+ }
+
+ @Before
+ public void createH5file() throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ try {
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5dsid = H5.H5Screate_simple(RANK, H5dims, H5maxdims);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Dplist.createH5file: " + err);
+ }
+ assertTrue("TestH5Dplist.createH5file: H5.H5Fcreate: ",H5fid > 0);
+ assertTrue("TestH5Dplist.createH5file: H5.H5Screate_simple: ",H5dsid > 0);
+
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5dcpl_id >= 0)
+ try {H5.H5Pclose(H5dcpl_id);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+
+ _deleteFile(H5_FILE);
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Dset_extent() {
+ int[][] write_dset_data = new int[DIM_X][DIM_Y];
+ int[][] read_dset_data = new int[DIM_X][DIM_Y];
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ write_dset_data[indx][jndx] = indx * jndx - jndx;
+
+ _createPDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DATASET_CREATE);
+
+ // Read values from the dataset, which has not been written to yet.
+ try {
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_INT,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, read_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
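+ // The dataset was created with early allocation and FILLVAL as its fill value, so unwritten elements read back as 99.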
+ assertTrue("testH5Dset_extent - H5.H5Dread: ", read_dset_data[0][0] == 99);
+
+ // Write the data to the dataset.
+ try {
+ H5.H5Dwrite(H5did, HDF5Constants.H5T_NATIVE_INT,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, write_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data back.
+ try {
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_INT,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, read_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
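+ // write_dset_data[3][6] = 3 * 6 - 6 = 12.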
+ assertTrue("testH5Dset_extent - H5.H5Dread: ", read_dset_data[3][6] == 12);
+
+ // Extend the dataset.
+ try {
+ H5.H5Dset_extent(H5did, H5extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read from the extended dataset.
+ try {
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_INT,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
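+ // The original region keeps its written values, while the newly extended region reads back the fill value (99).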
+ assertTrue("testH5Dset_extent - H5.H5Dread: ", extend_dset_data[3][6] == 12);
+ assertTrue("testH5Dset_extent - H5.H5Dread: ", extend_dset_data[4][8] == 99);
+ }
+
+}
diff --git a/java/test/TestH5E.java b/java/test/TestH5E.java
new file mode 100644
index 0000000..028369d
--- /dev/null
+++ b/java/test/TestH5E.java
@@ -0,0 +1,553 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.ArrayList;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.callbacks.H5E_walk_cb;
+import hdf.hdf5lib.callbacks.H5E_walk_t;
+import hdf.hdf5lib.structs.H5E_error2_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5E {
+ @Rule public TestName testname = new TestName();
+ long hdf_java_classid = -1;
+ long current_stackid = -1;
+
+ @Before
+ public void H5Eget_stack_class() {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ hdf_java_classid = -1;
+ try {
+ hdf_java_classid = H5.H5Eregister_class("HDF-Java-Error",
+ "hdf-java", "2.5");
+ current_stackid = H5.H5Eget_current_stack();
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_stack_class: " + err);
+ }
+ }
+
+ @After
+ public void H5Erestore_stack_class() {
+ try {
+ H5.H5Eunregister_class(hdf_java_classid);
+ hdf_java_classid = -1;
+ H5.H5Eclose_stack(current_stackid);
+ current_stackid = -1;
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Erestore_stack_class: " + err);
+ }
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Eget_class_name() {
+ try {
+ String class_name = H5.H5Eget_class_name(hdf_java_classid);
+ assertNotNull("H5.H5Eget_class_name: " + class_name, class_name);
+ assertEquals("H5.H5Eget_class_name: ", "HDF-Java-Error", class_name);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_class_name: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Eprint2() {
+ try {
+ assertFalse(current_stackid < 0);
+ H5.H5Eprint2(current_stackid, null);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eprint2: " + err);
+ }
+ }
+
+ @Ignore("Tested with create_msg_major[minor]")
+ public void testH5Eclose_msg() {
+ fail("Not yet implemented");
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Ecreate_msg_name_null() throws Throwable {
+ H5.H5Ecreate_msg(hdf_java_classid, HDF5Constants.H5E_MAJOR, null);
+ }
+
+ @Test
+ public void testH5Ecreate_msg_major() {
+ try {
+ long err_id = H5.H5Ecreate_msg(hdf_java_classid,
+ HDF5Constants.H5E_MAJOR, "Error in Test");
+ assertFalse("H5.H5Ecreate_msg_major: " + err_id, err_id < 0);
+ H5.H5Eclose_msg(err_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Ecreate_msg_major: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Ecreate_msg_minor() {
+ try {
+ long err_id = H5.H5Ecreate_msg(hdf_java_classid,
+ HDF5Constants.H5E_MINOR, "Error in Test Function");
+ assertFalse("H5.H5Ecreate_msg_minor: " + err_id, err_id < 0);
+ H5.H5Eclose_msg(err_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Ecreate_msg_minor: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Eget_msg() {
+ int[] error_msg_type = { HDF5Constants.H5E_MINOR };
+ long err_id = -1;
+ String msg = null;
+ try {
+ err_id = H5.H5Ecreate_msg(hdf_java_classid,
+ HDF5Constants.H5E_MAJOR, "Error in Test");
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_msg: " + err);
+ }
+ assertFalse("H5.H5Eget_msg: H5Ecreate_msg - " + err_id, err_id < 0);
+ try {
+ msg = H5.H5Eget_msg(err_id, error_msg_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_msg: " + err);
+ }
+ assertNotNull("H5.H5Eget_msg: " + msg, msg);
+ assertEquals("H5.H5Eget_msg: ", "Error in Test", msg);
+ assertEquals("H5.H5Eget_msg: ", HDF5Constants.H5E_MAJOR,
+ error_msg_type[0]);
+ try {
+ H5.H5Eclose_msg(err_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_msg: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Eget_msg_major() {
+
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (HDF5LibraryException hdferr) {
+ int[] error_msg_type = { HDF5Constants.H5E_MAJOR };
+ String msg = null;
+ try {
+ msg = H5.H5Eget_msg(hdferr.getMajorErrorNumber(),
+ error_msg_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_msg: " + err);
+ }
+ assertNotNull("H5.H5Eget_msg: " + msg, msg);
+ assertEquals("H5.H5Eget_msg: ", "Invalid arguments to routine",
+ msg);
+ assertEquals("H5.H5Eget_msg: ", HDF5Constants.H5E_MAJOR,
+ error_msg_type[0]);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_msg: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Eget_msg_minor() {
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (HDF5LibraryException hdferr) {
+ int[] error_msg_type = { HDF5Constants.H5E_MINOR };
+ String msg = null;
+ try {
+ msg = H5.H5Eget_msg(hdferr.getMinorErrorNumber(),
+ error_msg_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_msg: " + err);
+ }
+ assertNotNull("H5.H5Eget_msg: " + msg, msg);
+ assertEquals("H5.H5Eget_msg: ", "Inappropriate type", msg);
+ assertEquals("H5.H5Eget_msg: ", HDF5Constants.H5E_MINOR,
+ error_msg_type[0]);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_msg: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Ecreate_stack() {
+ long stk_id = -1;
+ try {
+ stk_id = H5.H5Ecreate_stack();
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Ecreate_stack: " + err);
+ }
+ assertFalse("H5.H5Ecreate_stack: " + stk_id, stk_id < 0);
+ try {
+ H5.H5Eclose_stack(stk_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Ecreate_stack: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Epop() {
+ try {
+ H5.H5Eset_current_stack(current_stackid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Epop: " + err);
+ }
+
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (Throwable err) {
+ }
+
+ // save current stack contents
+ try {
+ current_stackid = H5.H5Eget_current_stack();
+ }
+ catch (HDF5LibraryException err) {
+ err.printStackTrace();
+ fail("H5.H5Epop: " + err);
+ }
+
+ long num_msg = -1;
+ try {
+ num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Epop: " + err);
+ }
+
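+ // H5Eget_current_stack saved the messages to a separate stack and cleared the default stack, so it now reports zero messages.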
+ assertTrue("H5.H5Epop #:" + num_msg, num_msg == 0);
+
+ try {
+ num_msg = H5.H5Eget_num(current_stackid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Epop: " + err);
+ }
+
+ assertTrue("H5.H5Epop #:" + num_msg, num_msg == 3);
+
+ try {
+ H5.H5Epop(current_stackid, 1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Epop: " + err);
+ }
+
+ try {
+ num_msg = H5.H5Eget_num(current_stackid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Epop: " + err);
+ }
+
+ assertTrue("H5.H5Epop", num_msg == 2);
+ }
+
+ @Test
+ public void testH5Epush() {
+ String err_func = "testH5Epush";
+ String err_msg = "Error message";
+ long estack_id = -1;
+ long maj_err_id = -1;
+ long min_err_id = -1;
+ long num_msg = -1;
+
+ try {
+ try {
+ maj_err_id = H5.H5Ecreate_msg(hdf_java_classid, HDF5Constants.H5E_MAJOR, "Error in Test");
+ assertFalse("testH5Epush: H5.H5Ecreate_msg_major: " + maj_err_id, maj_err_id < 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Epush: H5.H5Ecreate_msg_major: " + err);
+ }
+ try {
+ min_err_id = H5.H5Ecreate_msg(hdf_java_classid, HDF5Constants.H5E_MINOR, "Error in Test Function");
+ assertFalse("H5.H5Ecreate_msg_minor: " + min_err_id, min_err_id < 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Epush: H5.H5Ecreate_msg_minor: " + err);
+ }
+
+ try {
+ estack_id = H5.H5Ecreate_stack();
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Epush: H5.H5Ecreate_stack: " + err);
+ }
+ assertFalse("testH5Epush: H5.H5Ecreate_stack: " + estack_id, estack_id < 0);
+
+ try {
+ num_msg = H5.H5Eget_num(estack_id);
+ assertTrue("testH5Epush #:" + num_msg, num_msg == 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Epush: H5.H5Eget_num: " + err);
+ }
+
+ H5.H5Epush(estack_id, "TestH5E.java", err_func, 354, hdf_java_classid, maj_err_id, min_err_id, err_msg);
+
+ try {
+ num_msg = H5.H5Eget_num(estack_id);
+ assertTrue("testH5Epush #:" + num_msg, num_msg == 1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Epush: H5.H5Eget_num: " + err);
+ }
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Epush: " + err);
+ }
+ finally {
+ if (estack_id >= 0)
+ try {H5.H5Eclose_stack(estack_id);} catch (Exception ex) {}
+ if (maj_err_id >= 0)
+ try {H5.H5Eclose_msg(maj_err_id);} catch (Exception ex) {}
+ if (min_err_id >= 0)
+ try {H5.H5Eclose_msg(min_err_id);} catch (Exception ex) {}
+ }
+ } /* end testH5Epush() */
+
+ @Test
+ public void testH5EprintInt() {
+ assertFalse(current_stackid < 0);
+ try {
+ H5.H5Eprint2(current_stackid, null);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5EprintInt: " + err);
+ }
+ }
+
+ @Test
+ public void testH5EclearInt() {
+ try {
+ H5.H5Eclear(current_stackid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5EclearInt: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Eclear2() {
+ try {
+ H5.H5Eclear2(current_stackid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eclear2: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Eauto_is_v2() {
+ boolean is_v2 = false;
+ try {
+ is_v2 = H5.H5Eauto_is_v2(current_stackid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eauto_is_v2: " + err);
+ }
+ assertTrue("H5.H5Eauto_is_v2: ", is_v2);
+ }
+
+ @Test
+ public void testH5Eget_num() {
+ long num_msg = -1;
+ try {
+ num_msg = H5.H5Eget_num(current_stackid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_num: " + err);
+ }
+ assertTrue("H5.H5Eget_num", num_msg == 0);
+ }
+
+ @Test
+ public void testH5Eget_num_with_msg() {
+ try {
+ H5.H5Eset_current_stack(current_stackid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Epop: " + err);
+ }
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (Throwable err) {
+ }
+
+ // save current stack contents
+ try {
+ current_stackid = H5.H5Eget_current_stack();
+ }
+ catch (HDF5LibraryException err) {
+ err.printStackTrace();
+ fail("H5.H5Epop: " + err);
+ }
+
+ long num_msg = -1;
+ try {
+ num_msg = H5.H5Eget_num(current_stackid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Epop: " + err);
+ }
+ assertTrue("H5.H5Eget_num_with_msg #:" + num_msg, num_msg > 0);
+ }
+
+ @Test
+ public void testH5Ewalk() {
+ class wdata {
+ public String err_desc = null;
+ public String func_name = null;
+ public int line = -1;
+ wdata(String desc, String func, int lineno) {
+ this.err_desc = new String(desc);
+ this.func_name = new String(func);
+ this.line = lineno;
+ }
+ }
+ class H5E_walk_data implements H5E_walk_t {
+ public ArrayList<wdata> walkdata = new ArrayList<wdata>();
+ }
+ H5E_walk_t walk_data = new H5E_walk_data();
+ class H5E_walk_callback implements H5E_walk_cb {
+ public int callback(int nidx, H5E_error2_t info, H5E_walk_t op_data) {
+ wdata wd = new wdata(info.desc, info.func_name, info.line);
+ ((H5E_walk_data)op_data).walkdata.add(wd);
+ return 0;
+ }
+ }
+ H5E_walk_cb walk_cb = new H5E_walk_callback();
+ long num_msg = -1;
+
+ try {
+ H5.H5Eset_current_stack(current_stackid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Ewalk:H5Eset_current_stack " + err);
+ }
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (Throwable err) {
+ }
+
+ // save current stack contents
+ try {
+ current_stackid = H5.H5Eget_current_stack();
+ }
+ catch (HDF5LibraryException err) {
+ err.printStackTrace();
+ fail("H5.H5Epop: " + err);
+ }
+
+ try {
+ num_msg = H5.H5Eget_num(current_stackid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Ewalk:H5Eget_num " + err);
+ }
+ assertTrue("testH5Ewalk #:" + num_msg, num_msg == 3);
+
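+ // The line numbers, function names, and descriptions checked below match the error records produced by this particular HDF5 library version.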
+ try {
+ H5.H5Ewalk2(current_stackid, HDF5Constants.H5E_WALK_UPWARD, walk_cb, walk_data);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Ewalk:H5Ewalk2 " + err);
+ }
+ assertFalse("testH5Ewalk:H5Ewalk2 ",((H5E_walk_data)walk_data).walkdata.isEmpty());
+ assertTrue("testH5Ewalk:H5Ewalk2 "+((H5E_walk_data)walk_data).walkdata.size(),((H5E_walk_data)walk_data).walkdata.size()==3);
+ assertTrue("testH5Ewalk:H5Ewalk2 "+((wdata)((H5E_walk_data)walk_data).walkdata.get(0)).line,((wdata)((H5E_walk_data)walk_data).walkdata.get(0)).line==3767);
+ assertTrue("testH5Ewalk:H5Ewalk2 "+((wdata)((H5E_walk_data)walk_data).walkdata.get(1)).line,((wdata)((H5E_walk_data)walk_data).walkdata.get(1)).line==5506);
+ assertTrue("testH5Ewalk:H5Ewalk2 "+((wdata)((H5E_walk_data)walk_data).walkdata.get(1)).func_name,((wdata)((H5E_walk_data)walk_data).walkdata.get(1)).func_name.compareToIgnoreCase("H5P_verify_apl_and_dxpl")==0);
+ assertTrue("testH5Ewalk:H5Ewalk2 "+((wdata)((H5E_walk_data)walk_data).walkdata.get(0)).err_desc,((wdata)((H5E_walk_data)walk_data).walkdata.get(0)).err_desc.compareToIgnoreCase("not a property list")==0);
+ assertTrue("testH5Ewalk:H5Ewalk2 "+((wdata)((H5E_walk_data)walk_data).walkdata.get(1)).err_desc,((wdata)((H5E_walk_data)walk_data).walkdata.get(1)).err_desc.compareToIgnoreCase("not the required access property list")==0);
+ }
+
+}
diff --git a/java/test/TestH5Edefault.java b/java/test/TestH5Edefault.java
new file mode 100644
index 0000000..510aa2f
--- /dev/null
+++ b/java/test/TestH5Edefault.java
@@ -0,0 +1,564 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Edefault {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void H5Eset_default_stack() {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ try {
+ // Clear any active stack messages
+ H5.H5Eclear2(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (HDF5LibraryException err) {
+ err.printStackTrace();
+ fail("H5Eset_default_stack: " + err);
+ }
+ }
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Eprint2_invalid_classid() throws Throwable {
+ H5.H5Eprint2(-1, null);
+ }
+
+ @Test
+ public void testH5Eprint() {
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (Throwable err) {
+ }
+ try {
+ H5.H5Eprint2(HDF5Constants.H5E_DEFAULT, null);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eprint: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Eget_current_stack() {
+ long num_msg = -1;
+ long num_msg_default = -1;
+ long stack_id = -1;
+ long stack_id_default = HDF5Constants.H5E_DEFAULT;
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (Throwable err) {
+ //default stack id will be different after exception
+ stack_id_default = HDF5Constants.H5E_DEFAULT;
+ //err.printStackTrace(); //This will clear the error stack
+ }
+ // Verify we have the correct number of messages
+ try {
+ num_msg_default = H5.H5Eget_num(stack_id_default);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg_default,
+ num_msg_default == 3);
+
+ //Save a copy of the current stack and clear the current stack
+ try {
+ stack_id = H5.H5Eget_current_stack();
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_current_stack: " + err);
+ }
+ assertFalse("H5.H5Eget_current_stack: get_current_stack - "
+ + stack_id, stack_id < 0);
+ assertFalse("H5.H5Eget_current_stack: get_current_stack - "
+ + stack_id, stack_id == stack_id_default);
+
+ // Verify we have the correct number of messages
+ try {
+ num_msg_default = H5.H5Eget_num(stack_id_default);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg_default,
+ num_msg_default == 0);
+
+ //Verify the copy has the correct number of messages
+ try {
+ num_msg = H5.H5Eget_num(stack_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg,
+ num_msg == 3);
+
+ try {
+ H5.H5Eclose_stack(stack_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_current_stack: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Eget_current_stack_pop() {
+ long num_msg = -1;
+ long num_msg_default = -1;
+ long stack_id = -1;
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (Throwable err) {
+ //err.printStackTrace(); //This will clear the error stack
+ }
+
+ // Verify we have the correct number of messages
+ try {
+ num_msg_default = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg_default,
+ num_msg_default == 3);
+
+ //Save a copy of the current stack and clear the current stack
+ try {
+ stack_id = H5.H5Eget_current_stack();
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_current_stack: " + err);
+ }
+ assertFalse("H5.H5Eget_current_stack: get_current_stack - "
+ + stack_id, stack_id < 0);
+ assertFalse("H5.H5Eget_current_stack: get_current_stack - "
+ + stack_id, stack_id == HDF5Constants.H5E_DEFAULT);
+
+ // Verify we have the correct number of messages
+ try {
+ num_msg_default = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg_default,
+ num_msg_default == 0);
+
+ //Verify the copy has the correct number of messages
+ try {
+ num_msg = H5.H5Eget_num(stack_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg,
+ num_msg == 3);
+
+ //Generate errors on default stack
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (Throwable err) {
+ //err.printStackTrace(); //This will clear the error stack
+ }
+
+ // Verify we have the correct number of messages
+ try {
+ num_msg_default = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg_default,
+ num_msg_default == 3);
+
+ //Remove one message from the current stack
+ try {
+ H5.H5Epop(HDF5Constants.H5E_DEFAULT, 1);
+ num_msg_default = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eget_current_stack: pop #:" + num_msg_default,
+ num_msg_default == 2);
+
+ //Verify the copy still has the correct number of messages
+ try {
+ num_msg = H5.H5Eget_num(stack_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eget_current_stack: get_num #:" + num_msg,
+ num_msg == 3);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Eclose_stack_invalid_stackid() throws Throwable {
+ H5.H5Eclose_stack(-1);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Eget_class_name_invalid_classid() throws Throwable {
+ H5.H5Eget_class_name(-1);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Eget_class_name_invalid_classname() throws Throwable {
+ H5.H5Eget_class_name(HDF5Constants.H5E_DEFAULT);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Eclose_msg_invalid_errid() throws Throwable {
+ H5.H5Eclose_msg(-1);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Ecreate_msg_invalid_errid() throws Throwable {
+ H5.H5Ecreate_msg(-1, HDF5Constants.H5E_MAJOR, "null");
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Eget_msg_invalid_msgid() throws Throwable {
+ H5.H5Eget_msg(-1, null);
+ }
+
+ @Test
+ public void testH5Ecreate_stack() {
+ try {
+ long stack_id = H5.H5Ecreate_stack();
+ assertTrue("H5.H5Ecreate_stack", stack_id > 0);
+ H5.H5Eclose_stack(stack_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Ecreate_stack: " + err);
+ }
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Eset_current_stack_invalid_stkid() throws Throwable {
+ H5.H5Eset_current_stack(-1);
+ }
+
+ @Test
+ public void testH5Eset_current_stack() {
+ long num_msg = -1;
+ long stack_id = -1;
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (Throwable err) {
+ //err.printStackTrace(); //This will clear the error stack
+ }
+
+ // Verify we have the correct number of messages
+ try {
+ num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eset_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eset_current_stack: get_num #:" + num_msg,
+ num_msg == 3);
+
+ //Save a copy of the current stack
+ try {
+ stack_id = H5.H5Eget_current_stack();
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eset_current_stack: " + err);
+ }
+ assertFalse("H5.H5Eset_current_stack: get_current_stack - "
+ + stack_id, stack_id < 0);
+ assertFalse("H5.H5Eset_current_stack: get_current_stack - "
+ + stack_id, stack_id == HDF5Constants.H5E_DEFAULT);
+
+ //Verify the copy has the correct number of messages
+ try {
+ num_msg = H5.H5Eget_num(stack_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eset_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eset_current_stack: get_num #:" + num_msg,
+ num_msg == 3);
+
+ //Generate errors on default stack
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (Throwable err) {
+ //err.printStackTrace(); //This will clear the error stack
+ }
+
+ // Verify we have the correct number of messages
+ try {
+ num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eset_current_stack: get_num #:" + num_msg,
+ num_msg == 3);
+
+ //Remove one message from the current stack
+ try {
+ H5.H5Epop(HDF5Constants.H5E_DEFAULT, 1);
+ num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eset_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eset_current_stack: pop #:" + num_msg,
+ num_msg == 2);
+
+ //Verify the copy still has the correct number of messages
+ try {
+ num_msg = H5.H5Eget_num(stack_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eset_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eset_current_stack: get_num #:" + num_msg,
+ num_msg == 3);
+
+ try {
+ H5.H5Eset_current_stack(stack_id);
+ num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eset_current_stack: " + err);
+ }
+ assertTrue("H5.H5Eset_current_stack: get_num - " + num_msg,
+ num_msg == 3);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Epop_invalid_stkid() throws Throwable {
+ H5.H5Epop(-1, 0);
+ }
+
+ @Test
+ public void testH5Epop() throws Throwable {
+ long num_msg = -1;
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (Throwable err) {
+ }
+ try {
+ num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Epop: " + err);
+ }
+ assertTrue("H5.H5Epop before #:" + num_msg, num_msg == 3);
+ try {
+ H5.H5Epop(HDF5Constants.H5E_DEFAULT, 1);
+ num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Epop: " + err);
+ }
+ assertTrue("H5.H5Epop after #:" + num_msg, num_msg == 2);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Epush_invalid_stkid() throws Throwable {
+ H5.H5Epush(-1, "Invalid", "Invalid", 0, -1, -1, -1, "Invalid message");
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Epush_null_name() throws Throwable {
+ H5.H5Epush(HDF5Constants.H5E_DEFAULT, null, "Invalid", 0, HDF5Constants.H5E_DEFAULT, HDF5Constants.H5E_DEFAULT, HDF5Constants.H5E_DEFAULT, "Invalid message");
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5EprintInt_invalid_classid() throws Throwable {
+ H5.H5Eprint2(-1, null);
+ }
+
+ @Test
+ public void testH5EprintInt() {
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (Throwable err) {
+ }
+ try {
+ H5.H5Eprint2(HDF5Constants.H5E_DEFAULT, null);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5EprintInt: " + err);
+ }
+ }
+
+ @Test
+ public void testH5EclearInt() {
+ try {
+ H5.H5Eclear(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5EclearInt: " + err);
+ }
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Eclear2_invalid_stkid() throws Throwable {
+ H5.H5Eclear2(-1);
+ }
+
+ @Test
+ public void testH5Eclear() {
+ try {
+ H5.H5Eclear2(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eclear2: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Eclear2_with_msg() {
+ long num_msg = -1;
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (Throwable err) {
+ }
+ try {
+ num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eclear2_with_msg: " + err);
+ }
+ assertTrue("H5.H5Eclear2_with_msg before #:" + num_msg,
+ num_msg == 3);
+ try {
+ H5.H5Eclear2(HDF5Constants.H5E_DEFAULT);
+ num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eclear2_with_msg: " + err);
+ }
+ assertTrue("H5.H5Eclear2_with_msg after #:" + num_msg, num_msg == 0);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Eauto_is_v2_invalid_stkid() throws Throwable {
+ H5.H5Eauto_is_v2(-1);
+ }
+
+ @Test
+ public void testH5Eauto_is_v2() {
+ boolean is_v2 = false;
+ try {
+ is_v2 = H5.H5Eauto_is_v2(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eauto_is_v2: " + err);
+ }
+ assertTrue("H5.H5Eauto_is_v2: ", is_v2);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Eget_num_invalid_stkid() throws Throwable {
+ H5.H5Eget_num(-1);
+ }
+
+ @Test
+ public void testH5Eget_num() {
+ long num_msg = -1;
+ try {
+ num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_num: " + err);
+ }
+ assertTrue("H5.H5Eget_num #:" + num_msg, num_msg == 0);
+ }
+
+ @Test
+ public void testH5Eget_num_with_msg() {
+ long num_msg = -1;
+ try {
+ H5.H5Fopen("test", 0, 1);
+ }
+ catch (Throwable err) {
+ }
+ try {
+ num_msg = H5.H5Eget_num(HDF5Constants.H5E_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Eget_num_with_msg: " + err);
+ }
+ assertTrue("H5.H5Eget_num_with_msg #:" + num_msg, num_msg > 0);
+ }
+
+}
diff --git a/java/test/TestH5Eregister.java b/java/test/TestH5Eregister.java
new file mode 100644
index 0000000..13e0ca2
--- /dev/null
+++ b/java/test/TestH5Eregister.java
@@ -0,0 +1,78 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import hdf.hdf5lib.H5;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Eregister {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void checkOpenIDs() {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ }
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Eregister_class_cls_name_null() throws Throwable {
+ H5.H5Eregister_class(null, "libname", "version");
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Eregister_class_lib_name_null() throws Throwable {
+ H5.H5Eregister_class("clsname", null, "version");
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Eregister_class_version_null() throws Throwable {
+ H5.H5Eregister_class("clsname", "libname", null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Eunregister_class_invalid_classid() throws Throwable {
+ H5.H5Eunregister_class(-1);
+ }
+
+ @Test
+ public void testH5Eregister_class() {
+ long hdf_java_classid = -1;
+ try {
+ hdf_java_classid = H5.H5Eregister_class("HDF-Java-Error",
+ "hdf-java", "2.5");
+ }
+ catch (Throwable err) {
+ fail("H5.H5Eregister_class: " + err);
+ }
+ try {
+ H5.H5Eunregister_class(hdf_java_classid);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Eunregister_class: " + err);
+ }
+ }
+}
diff --git a/java/test/TestH5F.java b/java/test/TestH5F.java
new file mode 100644
index 0000000..3451187
--- /dev/null
+++ b/java/test/TestH5F.java
@@ -0,0 +1,332 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5F {
+ @Rule public TestName testname = new TestName();
+ private static final String H5_FILE = "test.h5";
+
+ private static final int COUNT_OBJ_FILE = 1;
+ private static final int COUNT_OBJ_DATASET = 0;
+ private static final int COUNT_OBJ_GROUP = 0;
+ private static final int COUNT_OBJ_DATATYPE = 0;
+ private static final int COUNT_OBJ_ATTR = 0;
+ private static final int COUNT_OBJ_ALL = (COUNT_OBJ_FILE
+ + COUNT_OBJ_DATASET + COUNT_OBJ_GROUP + COUNT_OBJ_DATATYPE + COUNT_OBJ_ATTR);
+ private static final int[] OBJ_COUNTS = { COUNT_OBJ_FILE,
+ COUNT_OBJ_DATASET, COUNT_OBJ_GROUP, COUNT_OBJ_DATATYPE,
+ COUNT_OBJ_ATTR, COUNT_OBJ_ALL };
+ private static final int[] OBJ_TYPES = { HDF5Constants.H5F_OBJ_FILE,
+ HDF5Constants.H5F_OBJ_DATASET, HDF5Constants.H5F_OBJ_GROUP,
+ HDF5Constants.H5F_OBJ_DATATYPE, HDF5Constants.H5F_OBJ_ATTR,
+ HDF5Constants.H5F_OBJ_ALL };
+ long H5fid = -1;
+
+ private final void _deleteFile(String filename) {
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ @Before
+ public void createH5file()
+ throws HDF5LibraryException, NullPointerException {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5fid > 0) {
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ H5fid = -1;
+ }
+ _deleteFile(H5_FILE);
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Fget_create_plist() {
+ long plist = -1;
+
+ try {
+ plist = H5.H5Fget_create_plist(H5fid);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fget_create_plist: " + err);
+ }
+ assertTrue(plist > 0);
+ try {H5.H5Pclose(plist);} catch (HDF5LibraryException e) {e.printStackTrace();}
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Fget_create_plist_closed() throws Throwable {
+ long fid = -1;
+
+ if (H5fid > 0) {
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ H5fid = -1;
+ }
+
+ try {
+ fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fopen: " + err);
+ }
+ try {
+ H5.H5Fclose(fid);
+ }
+ catch (Exception ex) {
+ }
+
+ // it should fail because the file was closed.
+ H5.H5Fget_create_plist(fid);
+ }
+
+ @Test
+ public void testH5Fget_access_plist() {
+ long plist = -1;
+
+ try {
+ plist = H5.H5Fget_access_plist(H5fid);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fget_access_plist: " + err);
+ }
+ assertTrue(plist > 0);
+ try {H5.H5Pclose(plist);} catch (HDF5LibraryException e) {e.printStackTrace();}
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Fget_access_plist_closed() throws Throwable {
+ long fid = -1;
+
+ if (H5fid > 0) {
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ H5fid = -1;
+ }
+
+ try {
+ fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fopen: " + err);
+ }
+ try {
+ H5.H5Fclose(fid);
+ }
+ catch (Exception ex) {
+ }
+
+ // it should fail because the file was closed.
+ H5.H5Fget_access_plist(fid);
+ }
+
+ @Test
+ public void testH5Fget_intent_rdwr() {
+ int intent = 0;
+ long fid = -1;
+
+ if (H5fid > 0) {
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ H5fid = -1;
+ }
+
+ try {
+ fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fopen: " + err);
+ }
+ try {
+ intent = H5.H5Fget_intent(fid);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fget_intent: " + err);
+ }
+ assertEquals(HDF5Constants.H5F_ACC_RDWR, intent);
+
+ try {
+ H5.H5Fclose(fid);
+ }
+ catch (Exception ex) {
+ }
+ }
+
+ @Test
+ public void testH5Fget_intent_rdonly() {
+ int intent = 0;
+ long fid = -1;
+
+ if (H5fid > 0) {
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ H5fid = -1;
+ }
+
+ try {
+ fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDONLY,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fopen: " + err);
+ }
+ try {
+ intent = H5.H5Fget_intent(fid);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fget_intent: " + err);
+ }
+ assertEquals(HDF5Constants.H5F_ACC_RDONLY, intent);
+
+ try {
+ H5.H5Fclose(fid);
+ }
+ catch (Exception ex) {
+ }
+ }
+
+ @Test
+ public void testH5Fget_obj_count() {
+ long count = -1;
+
+ for (int i = 0; i < OBJ_TYPES.length; i++) {
+ try {
+ count = H5.H5Fget_obj_count(H5fid, OBJ_TYPES[i]);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fget_obj_count: " + err);
+ }
+
+            assertEquals(OBJ_COUNTS[i], count);
+ }
+ }
+
+ @Test
+ public void testH5Fget_obj_ids() {
+ long count = 0;
+ long max_objs = 100;
+ long[] obj_id_list = new long[(int)max_objs];
+ int[] open_obj_counts = new int[OBJ_TYPES.length];
+
+ for (int i = 0; i < OBJ_TYPES.length; i++)
+ open_obj_counts[i] = 0;
+
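+        // Only the file handle itself is open, so expect a single H5F_OBJ_FILE id;
+        // the H5F_OBJ_ALL slot is the sum of the per-type counts.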
+ open_obj_counts[0] = 1;
+ for (int i = 0; i < OBJ_TYPES.length - 1; i++)
+ open_obj_counts[OBJ_TYPES.length - 1] += open_obj_counts[i];
+
+ for (int i = 0; i < OBJ_TYPES.length; i++) {
+ try {
+ count = H5.H5Fget_obj_ids(H5fid, OBJ_TYPES[i], max_objs,
+ obj_id_list);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fget_obj_ids: " + err);
+ }
+            assertEquals(open_obj_counts[i], count);
+ }
+ }
+ /**
+ * TODO: need to add objects to the file after H5G[D,T]create() functions
+ * are implemented.
+ */
+ //
+ // @Test
+ // public void testH5Fmount_unmount() {
+ // String file1 = "src.h5";
+ // String file2 = "dst.h5";
+ // String group1 = "/G";
+ // String group2 = "/MOUNTED";
+ //
+ // _createH5File(file1);
+ // _createH5File(file2);
+ //
+ // int fid1 = _openFile(file1, HDF5Constants.H5F_ACC_RDWR);
+ // assertTrue(fid1 > 0);
+ //
+ // int fid2 = _openFile(file2, HDF5Constants.H5F_ACC_RDWR);
+ // assertTrue(fid2 > 0);
+ //
+ // // create a group at file1
+ // int gid = _createGroup(fid1, group1);
+ // try { H5.H5Gclose(gid); } catch (Exception ex) {}
+ //
+ // // create a group at file 2
+ // gid = _createGroup(fid2, group2);
+ // try { H5.H5Gclose(gid); } catch (Exception ex) {}
+ //
+    // // before mount, "/G/MOUNTED" does not exist in file1
+ // gid = _openGroup(fid1, group1+group2);
+ // assertTrue(gid < 0);
+ //
+ // // Mount file2 under G in the file1
+ // try {
+ // H5.H5Fmount(fid1, group1, fid2, HDF5Constants.H5P_DEFAULT);
+ // }
+ // catch (Throwable err) {
+ // fail("H5.H5Fmount: "+err);
+ // }
+ //
+ // // now file1 should have group "/G/MOUNTED"
+ // gid = _openGroup(fid1, group1+group2);
+ // assertTrue(gid > 0);
+ // try { H5.H5Gclose(gid); } catch (Exception ex) {}
+ //
+ // // unmount file2 from file1
+ // try {
+ // H5.H5Funmount(fid1, group1);
+ // }
+ // catch (Throwable err) {
+ // fail("H5.H5Funmount: "+err);
+ // }
+ //
+    // // file2 was unmounted from file1, "/G/MOUNTED" does not exist in file1
+ // gid = _openGroup(fid1, group1+group2);
+ // assertTrue(gid < 0);
+ //
+ // try { H5.H5Fclose(fid1); } catch (Exception ex) {}
+ // try { H5.H5Fclose(fid2); } catch (Exception ex) {}
+ //
+ // _deleteFile(file1);
+ // _deleteFile(file2);
+ // }
+}
diff --git a/java/test/TestH5Fbasic.java b/java/test/TestH5Fbasic.java
new file mode 100644
index 0000000..c08daff
--- /dev/null
+++ b/java/test/TestH5Fbasic.java
@@ -0,0 +1,320 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Fbasic {
+ @Rule public TestName testname = new TestName();
+ private static final String H5_FILE = "test.h5";
+ private static final String TXT_FILE = "test.txt";
+ long H5fid = -1;
+
+ private final void _deleteFile(String filename) {
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ @Before
+ public void createH5file() throws HDF5LibraryException, NullPointerException {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5fid > 0) {
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ }
+ _deleteFile(H5_FILE);
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Fcreate() {
+ assertTrue(H5fid > 0);
+ }
+
+ @Test
+ public void testH5Fis_hdf5() {
+ boolean isH5 = false;
+
+ try {
+ isH5 = H5.H5Fis_hdf5(H5_FILE);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fis_hdf5 failed on " + H5_FILE + ": " + err);
+ }
+ assertTrue(isH5 == true);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Fcreate_EXCL() throws Throwable {
+ H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_EXCL,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Fopen_read_only() throws Throwable {
+ long fid = -1;
+
+ try {
+ fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fopen: " + err);
+ }
+ try {
+ H5.H5Fclose(fid);
+ }
+ catch (Exception ex) {
+ }
+
+ // set the file to read-only
+ File file = new File(H5_FILE);
+ if (file.setWritable(false)) {
+ // this should fail.
+ fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+
+ try {
+ H5.H5Fclose(fid);
+ }
+ catch (Exception ex) {
+ }
+ }
+ else {
+            fail("File.setWritable(false) failed.");
+ }
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Freopen_closed() throws Throwable {
+ long fid = -1;
+ long fid2 = -1;
+
+ try {
+ fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fopen: " + err);
+ }
+
+ try {
+ H5.H5Fclose(fid);
+ }
+ catch (Exception ex) {
+ }
+
+ // should fail because the file was closed.
+ fid2 = H5.H5Freopen(fid);
+ }
+
+ @Test
+ public void testH5Freopen() {
+ long fid = -1;
+ long fid2 = -1;
+
+ try {
+ fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fopen: " + err);
+ }
+
+ try {
+ fid2 = H5.H5Freopen(fid);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Freopen: " + err);
+ }
+ assertTrue(fid2 > 0);
+
+ try {
+ H5.H5Fclose(fid2);
+ }
+ catch (Exception ex) {
+ }
+
+ try {
+ H5.H5Fclose(fid);
+ }
+ catch (Exception ex) {
+ }
+ }
+
+ @Test
+ public void testH5Fclose() {
+ long fid = -1;
+
+ try {
+ fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fopen: " + err);
+ }
+
+ try {
+ H5.H5Fclose(fid);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fclose: " + err);
+ }
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Fclose_twice() throws Throwable {
+ long fid = -1;
+
+ try {
+ fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fopen: " + err);
+ }
+
+ try {
+ H5.H5Fclose(fid);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fclose: " + err);
+ }
+
+ // it should fail since the file was closed.
+ H5.H5Fclose(fid);
+ }
+
+ @Test
+ public void testH5Fget_freespace() {
+ long freeSpace = 0;
+
+ try {
+ freeSpace = H5.H5Fget_freespace(H5fid);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fget_freespace: " + err);
+ }
+        assertEquals(0, freeSpace);
+ }
+
+    // TODO: add and delete objects and test free space
+
+ @Test
+ public void testH5Fget_filesize() {
+ long fileSize = 0;
+
+ try {
+ fileSize = H5.H5Fget_filesize(H5fid);
+ }
+ catch (Throwable err) {
+            fail("H5.H5Fget_filesize: " + err);
+ }
+ assertTrue(fileSize > 0);
+ }
+
+    // TODO: add and delete objects and test file size
+
+ @Test
+ public void testH5Fget_mdc_hit_rate() {
+ double rate;
+
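+        // The hit rate value itself is not checked; the test only verifies that the
+        // call succeeds on an open file.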
+ try {
+ rate = H5.H5Fget_mdc_hit_rate(H5fid);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fget_mdc_hit_rate: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Fget_mdc_size() {
+ int nentries = -1;
+ long cache_sizes[] = new long[3];
+
+ try {
+ nentries = H5.H5Fget_mdc_size(H5fid, cache_sizes);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fget_mdc_size: " + err);
+ }
+ assertTrue("H5.H5Fget_mdc_size #:" + nentries, nentries == 4);
+ }
+
+ // TODO: test more cases of different cache sizes.
+
+ @Test
+ public void testH5Freset_mdc_hit_rate_stats() {
+
+ try {
+ H5.H5Freset_mdc_hit_rate_stats(H5fid);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Freset_mdc_hit_rate_stats: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Fget_name() {
+ String fname = null;
+
+ try {
+ fname = H5.H5Fget_name(H5fid);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fget_name: " + err);
+ }
+ assertNotNull(fname);
+        assertEquals(H5_FILE, fname);
+ }
+
+ @Test
+ public void testH5Fclear_elink_file_cache() {
+
+ try {
+ H5.H5Fclear_elink_file_cache(H5fid);
+ }
+ catch (Throwable err) {
+            fail("H5.H5Fclear_elink_file_cache: " + err);
+ }
+ }
+}
diff --git a/java/test/TestH5Fparams.java b/java/test/TestH5Fparams.java
new file mode 100644
index 0000000..2d67f3d
--- /dev/null
+++ b/java/test/TestH5Fparams.java
@@ -0,0 +1,215 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5F_info2_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Fparams {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void checkOpenIDs() {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ }
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Fcreate_null() throws Throwable {
+ H5.H5Fcreate(null, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Fopen_null() throws Throwable {
+ H5.H5Fopen(null, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Fis_hdf5_null() throws Throwable {
+ H5.H5Fis_hdf5(null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Fmount_null() throws Throwable {
+ H5.H5Fmount(-1, null, -1, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Funmount_null() throws Throwable {
+ H5.H5Funmount(-1, null);
+ }
+
+ @Test
+ public void testH5Fis_hdf5_text() {
+ File txtFile = null;
+ boolean isH5 = false;
+
+ try {
+ txtFile = new File("test.txt");
+ if (!txtFile.exists())
+ txtFile.createNewFile();
+ isH5 = H5.H5Fis_hdf5("test.txt");
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fis_hdf5 failed on test.txt: " + err);
+ }
+
+ assertFalse(isH5);
+
+ try {
+ txtFile.delete();
+ }
+ catch (SecurityException e) {
+ ;// e.printStackTrace();
+ }
+ }
+
+ @Test//(expected = HDF5LibraryException.class)
+ public void testH5Fclose_negative() throws Throwable {
+ // cannot close a file with negative id.
+ int fid = H5.H5Fclose(-1);
+ assertTrue(fid == 0);
+ }
+
+ @Test
+ public void testH5Fcreate() {
+ long fid = -1;
+ File file = null;
+
+ try {
+ fid = H5.H5Fcreate("test.h5", HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (fid > 0) {
+ H5.H5Fclose(fid);
+ }
+ file = new File("test.h5");
+ }
+ catch (Throwable err) {
+            fail("H5.H5Fcreate: " + err);
+ }
+
+ if (file.exists()) {
+ try {
+ file.delete();
+ }
+ catch (SecurityException e) {
+ ;// e.printStackTrace();
+ }
+ }
+ }
+
+ @Test
+ public void testH5Fflush_global() {
+ long fid = -1;
+
+ try {
+ fid = H5.H5Fcreate("test.h5", HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+            fail("H5.H5Fcreate: " + err);
+ }
+
+ try {
+ H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_GLOBAL);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fflush: " + err);
+ }
+
+ try {
+ H5.H5Fclose(fid);
+ }
+ catch (Exception ex) {
+ }
+ }
+
+ @Test
+ public void testH5Fflush_local() {
+ long fid = -1;
+
+ try {
+ fid = H5.H5Fcreate("test.h5", HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+            fail("H5.H5Fcreate: " + err);
+ }
+
+ try {
+ H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fflush: " + err);
+ }
+
+ try {
+ H5.H5Fclose(fid);
+ }
+ catch (Exception ex) {
+ }
+ }
+
+ @Test
+ public void testH5Fget_info() {
+ long fid = -1;
+
+ try {
+ try {
+ fid = H5.H5Fcreate("test.h5", HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+                fail("H5.H5Fcreate: " + err);
+ }
+
+ try {
+ H5F_info2_t finfo = H5.H5Fget_info(fid);
+                assertEquals(0, finfo.super_version);
+                assertEquals(0, finfo.free_version);
+                assertEquals(0, finfo.sohm_version);
+ }
+ catch (Throwable err) {
+ fail("H5.H5Fget_info: " + err);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ try {H5.H5Fclose(fid);} catch (Exception ex) {}
+ }
+ }
+}
diff --git a/java/test/TestH5G.java b/java/test/TestH5G.java
new file mode 100644
index 0000000..32329bb
--- /dev/null
+++ b/java/test/TestH5G.java
@@ -0,0 +1,505 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.structs.H5G_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5G {
+ @Rule public TestName testname = new TestName();
+ private static final String H5_FILE = "test.h5";
+ private static final String H5_FILE2 = "test2.h5";
+ private static final String[] GROUPS = { "/G1", "/G1/G11", "/G1/G12",
+ "/G1/G11/G111", "/G1/G11/G112", "/G1/G11/G113", "/G1/G11/G114" };
+ private static final String[] GROUPS2 = { "/G1", "/G1/G14", "/G1/G12", "/G1/G13", "/G1/G11"};
+ long H5fid = -1;
+ long H5fid2 = -1;
+
+ private final long _createGroup(long fid, String name) {
+ long gid = -1;
+ try {
+ gid = H5.H5Gcreate(fid, name, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Gcreate: " + err);
+ }
+ assertTrue("TestH5G._createGroup: ", gid > 0);
+
+ return gid;
+ }
+
+ private final long _createGroup2(long fid, String name) {
+ long gid = -1;
+ long gcpl = -1;
+ try {
+ gcpl = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE); //create gcpl
+ }
+ catch (final Exception ex) {
+ fail("H5.H5Pcreate(): " + ex);
+ }
+ assertTrue("TestH5G._createGroup2: ", gcpl >= 0);
+ try {
+ H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED
+ + HDF5Constants.H5P_CRT_ORDER_INDEXED); // Set link creation order
+ }
+ catch (final Exception ex) {
+ try {H5.H5Pclose(gcpl);} catch (final Exception exx) {}
+ fail("H5.H5Pset_link_creation_order: " + ex);
+ }
+ try {
+ gid = H5.H5Gcreate(fid, name, HDF5Constants.H5P_DEFAULT,
+ gcpl, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Gcreate: " + err);
+ }
+ finally {
+ try {H5.H5Pclose(gcpl);} catch (final Exception ex) {}
+ }
+ assertTrue("TestH5G._createGroup2: ", gid > 0);
+
+ return gid;
+ }
+
+ private final long _openGroup(long fid, String name) {
+ long gid = -1;
+ try {
+ gid = H5.H5Gopen(fid, name, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ gid = -1;
+ err.printStackTrace();
+ fail("H5.H5Gopen: " + err);
+ }
+ assertTrue("TestH5G._openGroup: ", gid > 0);
+
+ return gid;
+ }
+
+ private final void _deleteFile(String filename) {
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ @Before
+ public void createH5file()
+ throws HDF5LibraryException, NullPointerException {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ try {
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ H5fid2 = H5.H5Fcreate(H5_FILE2, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.createH5file: " + err);
+ }
+ assertTrue("TestH5G.createH5file: H5.H5Fcreate: ", H5fid > 0);
+ assertTrue("TestH5G.createH5file: H5.H5Fcreate: ", H5fid2 > 0);
+
+ long gid = -1;
+
+ for (int i = 0; i < GROUPS.length; i++) {
+ gid = _createGroup(H5fid, GROUPS[i]);
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ }
+
+ for (int i = 0; i < GROUPS2.length; i++) {
+ gid = _createGroup2(H5fid2, GROUPS2[i]);
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ }
+
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ H5.H5Fflush(H5fid2, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5fid > 0) {
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ }
+ if (H5fid2 > 0) {
+ try {H5.H5Fclose(H5fid2);} catch (Exception ex) {}
+ }
+ _deleteFile(H5_FILE);
+ _deleteFile(H5_FILE2);
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Gopen() {
+ long gid = -1;
+ for (int i = 0; i < GROUPS.length; i++) {
+ try {
+ gid = H5.H5Gopen(H5fid, GROUPS[i], HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gopen: H5.H5Gopen: " + err);
+ }
+ assertTrue("TestH5G.testH5Gopen: ", gid > 0);
+ try {
+ H5.H5Gclose(gid);
+ }
+ catch (Exception ex) {
+ }
+ }
+ }
+
+ @Test
+ public void testH5Gget_create_plist() {
+ long gid = -1;
+ long pid = -1;
+
+ for (int i = 0; i < GROUPS.length; i++) {
+ try {
+ gid = H5.H5Gopen(H5fid, GROUPS[i], HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_create_plist: H5.H5Gopen: " + err);
+ }
+ assertTrue("TestH5G.testH5Gget_create_plist: ", gid > 0);
+
+ try {
+ pid = H5.H5Gget_create_plist(gid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_create_plist: H5.H5Gget_create_plist: " + err);
+ }
+ assertTrue("TestH5G.testH5Gget_create_plist: ", pid > 0);
+
+ try {
+ H5.H5Gclose(gid);
+ }
+ catch (Exception ex) {
+ }
+ }
+ }
+
+ @Test
+ public void testH5Gget_info() {
+ H5G_info_t info = null;
+
+ for (int i = 0; i < GROUPS.length; i++) {
+
+ try {
+ info = H5.H5Gget_info(H5fid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_info: H5.H5Gget_info: " + err);
+ }
+ assertNotNull("TestH5G.testH5Gget_info: ", info);
+ }
+ }
+
+ @Test
+ public void testH5Gget_info_by_name() {
+ H5G_info_t info = null;
+
+ for (int i = 0; i < GROUPS.length; i++) {
+ try {
+ info = H5.H5Gget_info_by_name(H5fid, GROUPS[i],
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_info_by_name: H5.H5Gget_info_by_name: " + err);
+ }
+ assertNotNull("TestH5G.testH5Gget_info_by_name: ", info);
+ }
+ }
+
+ @Test
+ public void testH5Gget_info_by_idx() {
+ H5G_info_t info = null;
+ for (int i = 0; i < 2; i++) {
+ try {
+ info = H5.H5Gget_info_by_idx(H5fid, "/G1",
+ HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC,
+ i, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_info_by_idx: H5.H5Gget_info_by_idx: " + err);
+ }
+ assertNotNull("TestH5G.testH5Gget_info_by_idx: ", info);
+ }
+ }
+
+ @Test
+ public void testH5Gget_obj_info_all() {
+ H5G_info_t info = null;
+
+ long gid = _openGroup(H5fid, GROUPS[0]);
+
+ try {
+ info = H5.H5Gget_info(gid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_obj_info_all: H5.H5Gget_info: " + err);
+ }
+ finally {
+ try {H5.H5Gclose(gid);} catch (Exception ex) { }
+ }
+ assertNotNull("TestH5G.testH5Gget_obj_info_all: ", info);
+ assertTrue("TestH5G.testH5Gget_obj_info_all: number of links is empty", info.nlinks > 0);
+ String objNames[] = new String[(int) info.nlinks];
+ int objTypes[] = new int[(int) info.nlinks];
+ int lnkTypes[] = new int[(int) info.nlinks];
+ long objRefs[] = new long[(int) info.nlinks];
+
+ int names_found = 0;
+ try {
+ names_found = H5.H5Gget_obj_info_all(H5fid, GROUPS[0], objNames,
+ objTypes, lnkTypes, objRefs, HDF5Constants.H5_INDEX_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_obj_info_all: H5.H5Gget_obj_info_all: " + err);
+ }
+
+ assertTrue("number found[" + names_found + "] different than expected["
+ + objNames.length + "]", names_found == objNames.length);
+ for (int i = 0; i < objNames.length; i++) {
+ assertNotNull("name #" + i + " does not exist", objNames[i]);
+ assertTrue("TestH5G.testH5Gget_obj_info_all: ", objNames[i].length() > 0);
+ }
+ }
+
+ @Test
+ public void testH5Gget_obj_info_all_gid() {
+ H5G_info_t info = null;
+
+ long gid = _openGroup(H5fid, GROUPS[0]);
+
+ try {
+ info = H5.H5Gget_info(gid);
+ assertNotNull("TestH5G.testH5Gget_obj_info_all_gid: ", info);
+ assertTrue("TestH5G.testH5Gget_obj_info_all_gid: number of links is empty", info.nlinks > 0);
+ String objNames[] = new String[(int) info.nlinks];
+ long objRefs[] = new long[(int) info.nlinks];
+ int lnkTypes[] = new int[(int) info.nlinks];
+ int objTypes[] = new int[(int) info.nlinks];
+
+ int names_found = 0;
+ try {
+ names_found = H5.H5Gget_obj_info_all(gid, null, objNames, objTypes, lnkTypes,
+ objRefs, HDF5Constants.H5_INDEX_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_obj_info_all_gid: H5.H5Gget_obj_info_all: " + err);
+ }
+
+ assertTrue("TestH5G.testH5Gget_obj_info_all_gid: number found[" + names_found + "] different than expected["
+ + objNames.length + "]", names_found == objNames.length);
+ for (int i = 0; i < objNames.length; i++) {
+ assertNotNull("TestH5G.testH5Gget_obj_info_all_gid: name #" + i + " does not exist", objNames[i]);
+ assertTrue("TestH5G.testH5Gget_obj_info_all_gid: ", objNames[i].length() > 0);
+ }
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_obj_info_all_gid: H5.H5Gget_info: " + err);
+ }
+ finally {
+ try {H5.H5Gclose(gid);} catch (Exception ex) { }
+ }
+ }
+
+ @Test
+ public void testH5Gget_obj_info_all_gid2() {
+ H5G_info_t info = null;
+
+ long gid = _openGroup(H5fid, GROUPS[1]);
+
+ try {
+ info = H5.H5Gget_info(gid);
+ assertNotNull("TestH5G.testH5Gget_obj_info_all_gid2: ", info);
+ assertTrue("TestH5G.testH5Gget_obj_info_all_gid2: number of links is empty", info.nlinks > 0);
+ String objNames[] = new String[(int) info.nlinks];
+ long objRefs[] = new long[(int) info.nlinks];
+ int lnkTypes[] = new int[(int) info.nlinks];
+ int objTypes[] = new int[(int) info.nlinks];
+
+ int names_found = 0;
+ try {
+ names_found = H5.H5Gget_obj_info_all(gid, null, objNames, objTypes, lnkTypes,
+ objRefs, HDF5Constants.H5_INDEX_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_obj_info_all_gid2: H5.H5Gget_obj_info_all: " + err);
+ }
+
+ assertTrue("TestH5G.testH5Gget_obj_info_all_gid2: number found[" + names_found + "] different than expected["
+ + objNames.length + "]", names_found == objNames.length);
+ for (int i = 0; i < objNames.length; i++) {
+ assertNotNull("TestH5G.testH5Gget_obj_info_all_gid2: name #" + i + " does not exist", objNames[i]);
+ assertTrue("TestH5G.testH5Gget_obj_info_all_gid2: ", objNames[i].length() > 0);
+ }
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_obj_info_all_gid2: H5.H5Gget_info: " + err);
+ }
+ finally {
+ try {H5.H5Gclose(gid);} catch (Exception ex) { }
+ }
+ }
+
+ @Test
+ public void testH5Gget_obj_info_max() {
+ long gid = _openGroup(H5fid, GROUPS[0]);
+ long groups_max_size = GROUPS.length + 1;
+ String objNames[] = new String[(int)groups_max_size];
+ int objTypes[] = new int[(int)groups_max_size];
+ int lnkTypes[] = new int[(int)groups_max_size];
+ long objRefs[] = new long[(int)groups_max_size];
+
+ int names_found = 0;
+ try {
+ names_found = H5.H5Gget_obj_info_max(gid, objNames, objTypes, lnkTypes,
+ objRefs, groups_max_size);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_obj_info_max: H5.H5Gget_obj_info_max: " + err);
+ }
+ finally {
+ try {H5.H5Gclose(gid);} catch (Exception ex) { }
+ }
+
+ // expected number does not include root group
+ assertTrue("TestH5G.testH5Gget_obj_info_max: number found[" + names_found + "] different than expected["
+ + (GROUPS.length - 1) + "]", names_found == (GROUPS.length - 1));
+ for (int i = 0; i < GROUPS.length-1; i++) {
+ assertNotNull("TestH5G.testH5Gget_obj_info_max: name #"+i+" does not exist",objNames[i]);
+ assertTrue("TestH5G.testH5Gget_obj_info_max: ", objNames[i].length()>0);
+ }
+ }
+
+ @Test
+ public void testH5Gget_obj_info_max_limit() {
+ long gid = _openGroup(H5fid, GROUPS[0]);
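+        // Request fewer entries than were created so that only groups_max_size
+        // names are returned.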
+ long groups_max_size = GROUPS.length - 3;
+ String objNames[] = new String[(int)groups_max_size];
+ int objTypes[] = new int[(int)groups_max_size];
+ int lnkTypes[] = new int[(int)groups_max_size];
+ long objRefs[] = new long[(int)groups_max_size];
+
+ int names_found = 0;
+ try {
+ names_found = H5.H5Gget_obj_info_max(gid, objNames, objTypes, lnkTypes,
+ objRefs, groups_max_size);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_obj_info_max_limit: H5.H5Gget_obj_info_max: " + err);
+ }
+ finally {
+ try {H5.H5Gclose(gid);} catch (Exception ex) { }
+ }
+
+ assertTrue("TestH5G.testH5Gget_obj_info_max_limit: number found[" + names_found + "] different than expected["
+ + groups_max_size + "]", names_found == groups_max_size);
+ for (int i = 0; i < objNames.length; i++) {
+ assertNotNull("TestH5G.testH5Gget_obj_info_max_limit: name #" + i + " does not exist", objNames[i]);
+ assertTrue("TestH5G.testH5Gget_obj_info_max_limit: ", objNames[i].length() > 0);
+ }
+ }
+
+ @Test
+ public void testH5Gget_obj_info_all_byIndexType() {
+ H5G_info_t info = null;
+
+ long gid = _openGroup(H5fid2, GROUPS2[0]);
+
+ try {
+ info = H5.H5Gget_info(gid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_obj_info_all_byIndexType: H5.H5Gget_info: " + err);
+ }
+ finally {
+ try {H5.H5Gclose(gid);} catch (Exception ex) { }
+ }
+
+ assertNotNull("TestH5G.testH5Gget_obj_info_all_byIndexType: ", info);
+ assertTrue("TestH5G.testH5Gget_obj_info_all_byIndexType: number of links is empty", info.nlinks > 0);
+ String objNames[] = new String[(int) info.nlinks];
+ int objTypes[] = new int[(int) info.nlinks];
+ int lnkTypes[] = new int[(int) info.nlinks];
+ long objRefs[] = new long[(int) info.nlinks];
+
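+        // GROUPS2 were created with link creation order tracking enabled (see
+        // _createGroup2), so indexing by creation order and by name should yield
+        // different orderings.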
+ try {
+ H5.H5Gget_obj_info_all(H5fid2, GROUPS2[0], objNames,
+ objTypes, lnkTypes, objRefs, HDF5Constants.H5_INDEX_CRT_ORDER);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_obj_info_all_byIndexType: H5.H5Gget_obj_info_all: " + err);
+ }
+
+        assertEquals("G12", objNames[1]);
+        assertEquals("G13", objNames[2]);
+        assertEquals("G11", objNames[3]);
+
+ try {
+ H5.H5Gget_obj_info_all(H5fid2, GROUPS2[0], objNames,
+ objTypes, lnkTypes, objRefs, HDF5Constants.H5_INDEX_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5G.testH5Gget_obj_info_all_byIndexType: H5.H5Gget_obj_info_all: " + err);
+ }
+
+        assertEquals("G12", objNames[1]);
+        assertEquals("G13", objNames[2]);
+        assertEquals("G14", objNames[3]);
+ }
+
+}
diff --git a/java/test/TestH5Gbasic.java b/java/test/TestH5Gbasic.java
new file mode 100644
index 0000000..70acef4
--- /dev/null
+++ b/java/test/TestH5Gbasic.java
@@ -0,0 +1,371 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.structs.H5G_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Gbasic {
+ @Rule public TestName testname = new TestName();
+ private static final String H5_FILE = "test.h5";
+ long H5fid = -1;
+
+ private final long _createGroup(long fid, String name) {
+ long gid = -1;
+ try {
+ gid = H5.H5Gcreate(fid, name, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Gcreate: " + err);
+ }
+
+ return gid;
+ }
+
+ private final void _deleteFile(String filename) {
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ @Before
+ public void createH5file()
+ throws HDF5LibraryException, NullPointerException {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5fid > 0) {
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ }
+ _deleteFile(H5_FILE);
+ System.out.println();
+ }
+
+ @Test//(expected = HDF5LibraryException.class)
+ public void testH5Gclose_invalid() throws Throwable {
+ long gid = H5.H5Gclose(-1);
+ assertTrue(gid == 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Gcreate_null() throws Throwable {
+ long gid = -1;
+
+ // it should fail because the group name is null
+ gid = H5.H5Gcreate(H5fid, null, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Gcreate_invalid() throws Throwable {
+ H5.H5Gcreate(-1, "Invalid ID", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Gcreate() {
+ long gid = -1;
+ try {
+ gid = H5.H5Gcreate(H5fid, "/testH5Gcreate",
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Gcreate: " + err);
+ }
+ assertTrue(gid > 0);
+
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ }
+
+ @Test
+ public void testH5Gclose() {
+ long gid = _createGroup(H5fid, "/testH5Gcreate");
+ assertTrue(gid > 0);
+
+ try {
+ H5.H5Gclose(gid);
+ }
+ catch (Throwable err) {
+ fail("H5Gclose: " + err);
+ }
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Gcreate_exists() throws Throwable {
+ long gid = _createGroup(H5fid, "/testH5Gcreate");
+ assertTrue(gid > 0);
+
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+
+        // it should fail now because the group already exists in the file
+ gid = H5.H5Gcreate(H5fid, "/testH5Gcreate",
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Gcreate_anon() {
+ long gid = -1;
+ try {
+ gid = H5.H5Gcreate_anon(H5fid, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Gcreate_anon: " + err);
+ }
+ assertTrue(gid > 0);
+
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Gopen_null() throws Throwable {
+ long gid = -1;
+
+ gid = H5.H5Gopen(H5fid, null, HDF5Constants.H5P_DEFAULT);
+
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Gopen_invalid() throws Throwable {
+ H5.H5Gopen(-1, "Invalid ID", HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Gopen_not_exists() throws Throwable {
+ long gid = -1;
+
+ gid = H5.H5Gopen(H5fid, "Never_created", HDF5Constants.H5P_DEFAULT);
+
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ }
+
+ @Test
+ public void testH5Gopen() {
+ long gid = _createGroup(H5fid, "/testH5Gcreate");
+ assertTrue(gid > 0);
+
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+
+ try {
+ gid = H5.H5Gopen(H5fid, "/testH5Gcreate",
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Gopen: " + err);
+ }
+ assertTrue(gid > 0);
+
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Gget_create_plist_invalid() throws Throwable {
+ H5.H5Gget_create_plist(-1);
+ }
+
+ @Test
+ public void testH5Gget_create_plist() {
+ long pid = -1;
+ long gid = _createGroup(H5fid, "/testH5Gcreate");
+ assertTrue(gid > 0);
+
+ try {
+ pid = H5.H5Gget_create_plist(gid);
+ }
+ catch (Throwable err) {
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ err.printStackTrace();
+ fail("H5.H5Gget_create_plist: " + err);
+ }
+ assertTrue(pid > 0);
+
+ try {H5.H5Pclose(pid);} catch (Exception ex) {}
+
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Gget_info_invalid() throws Throwable {
+ H5.H5Gget_info(-1);
+ }
+
+ @Test
+ public void testH5Gget_info() {
+ H5G_info_t info = null;
+ long gid = _createGroup(H5fid, "/testH5Gcreate");
+ assertTrue(gid > 0);
+
+ try {
+ info = H5.H5Gget_info(gid);
+ }
+ catch (Throwable err) {
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ err.printStackTrace();
+ fail("H5.H5Gget_info: " + err);
+ }
+ assertNotNull(info);
+
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Gget_info_by_name_null() throws Throwable {
+ H5.H5Gget_info_by_name(-1, null, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Gget_info_by_name_invalid() throws Throwable {
+ H5.H5Gget_info_by_name(-1, "/testH5Gcreate", HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Gget_info_by_name_not_exists() throws Throwable {
+ H5.H5Gget_info_by_name(H5fid, "/testH5Gcreate",
+ HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Gget_info_by_name() {
+ H5G_info_t info = null;
+ long gid = _createGroup(H5fid, "/testH5Gcreate");
+ assertTrue(gid > 0);
+
+ try {
+ info = H5.H5Gget_info_by_name(gid, "/testH5Gcreate",
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ err.printStackTrace();
+ fail("H5.H5Gget_info_by_name: " + err);
+ }
+ assertNotNull(info);
+
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ }
+
+ @Test
+ public void testH5Gget_info_by_name_fileid() {
+ H5G_info_t info = null;
+ long gid = _createGroup(H5fid, "/testH5Gcreate");
+ assertTrue(gid > 0);
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+
+ try {
+ info = H5.H5Gget_info_by_name(H5fid, "/testH5Gcreate",
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ err.printStackTrace();
+ fail("H5.H5Gget_info_by_name: " + err);
+ }
+ assertNotNull(info);
+
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Gget_info_by_idx_null() throws Throwable {
+ H5.H5Gget_info_by_idx(-1, null, HDF5Constants.H5_INDEX_NAME,
+ HDF5Constants.H5_ITER_INC, 1L, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Gget_info_by_idx_invalid() throws Throwable {
+ H5.H5Gget_info_by_idx(-1, "/testH5Gcreate", HDF5Constants.H5_INDEX_NAME,
+ HDF5Constants.H5_ITER_INC, 1L, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Gget_info_by_idx_not_exists() throws Throwable {
+ H5.H5Gget_info_by_idx(H5fid, "/testH5Gcreate",
+ HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 1L,
+ HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Gget_info_by_idx() {
+ H5G_info_t info = null;
+ long gid = _createGroup(H5fid, "/testH5Gcreate");
+ assertTrue(gid > 0);
+
+ try {
+ info = H5.H5Gget_info_by_idx(gid, "/", HDF5Constants.H5_INDEX_NAME,
+ HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Gget_info_by_idx: " + err);
+ }
+ assertNotNull(info);
+
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+ }
+
+ @Test
+ public void testH5Gget_info_by_idx_fileid() {
+ H5G_info_t info = null;
+ long gid = _createGroup(H5fid, "/testH5Gcreate");
+ assertTrue(gid > 0);
+ try {H5.H5Gclose(gid);} catch (Exception ex) {}
+
+ try {
+ info = H5.H5Gget_info_by_idx(H5fid, "/",
+ HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Gget_info_by_idx: " + err);
+ }
+ assertNotNull(info);
+ }
+
+}
diff --git a/java/test/TestH5Giterate.java b/java/test/TestH5Giterate.java
new file mode 100644
index 0000000..17f594e
--- /dev/null
+++ b/java/test/TestH5Giterate.java
@@ -0,0 +1,136 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.structs.H5G_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Giterate {
+ @Rule public TestName testname = new TestName();
+ private static final String H5_FILE = "h5ex_g_iterate.hdf";
+ long H5fid = -1;
+
+ private final long _openGroup(long fid, String name) {
+ long gid = -1;
+ try {
+ gid = H5.H5Gopen(fid, name, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ gid = -1;
+ err.printStackTrace();
+            fail("H5.H5Gopen: " + err);
+ }
+
+ return gid;
+ }
+
+ @Before
+ public void openH5file()
+ throws HDF5LibraryException, NullPointerException {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ try {
+ H5fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDONLY,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Fopen: openH5file: " + err);
+ }
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5fid > 0) {
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ }
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Gget_obj_info_all() {
+ H5G_info_t info = null;
+
+ long gid = _openGroup(H5fid, "/");
+
+ try {
+ info = H5.H5Gget_info(gid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Gget_info: " + err);
+ }
+ try {
+ H5.H5Gclose(gid);
+ }
+ catch (Exception ex) {
+ }
+ assertNotNull(info);
+ assertTrue("number of links is empty", info.nlinks > 0);
+ String objNames[] = new String[(int) info.nlinks];
+ int objTypes[] = new int[(int) info.nlinks];
+ int lnkTypes[] = new int[(int) info.nlinks];
+ long objRefs[] = new long[(int) info.nlinks];
+
+ int names_found = 0;
+ try {
+ names_found = H5.H5Gget_obj_info_all(H5fid, "/", objNames,
+ objTypes, lnkTypes, objRefs, HDF5Constants.H5_INDEX_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Gget_obj_info_all: " + err);
+ }
+
+ assertTrue("number found[" + names_found + "] different than expected["
+ + objNames.length + "]", names_found == objNames.length);
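+        // The pre-built test file is expected to contain, at the root level, dataset
+        // DS1, named datatype DT1, group G1, and a link L1 that resolves to a dataset.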
+ for (int i = 0; i < objNames.length; i++) {
+ assertNotNull("name #" + i + " does not exist", objNames[i]);
+ assertTrue(objNames[i].length() > 0);
+ if (objTypes[i]==HDF5Constants.H5O_TYPE_GROUP) {
+ assertTrue("Group is index: "+i + " ",i==2);
+ assertTrue("Group is : "+objNames[i] + " ",objNames[i].compareToIgnoreCase("G1")==0);
+ }
+ else if (objTypes[i]==HDF5Constants.H5O_TYPE_DATASET) {
+ assertTrue("Dataset is index: "+i + " ",(i==0)||(i==3));
+ if(i==0)
+ assertTrue("Dataset is : "+objNames[i] + " ",objNames[i].compareToIgnoreCase("DS1")==0);
+ else
+ assertTrue("Dataset is : "+objNames[i] + " ",objNames[i].compareToIgnoreCase("L1")==0);
+ }
+ else if (objTypes[i]==HDF5Constants.H5O_TYPE_NAMED_DATATYPE) {
+ assertTrue("Datatype is index: "+i + " ",i==1);
+ assertTrue("Datatype is : "+objNames[i] + " ",objNames[i].compareToIgnoreCase("DT1")==0);
+ }
+ else {
+ fail(" Unknown at index: " + i + " " + objNames[i]);
+ }
+ }
+ }
+
+}
diff --git a/java/test/TestH5Lbasic.java b/java/test/TestH5Lbasic.java
new file mode 100644
index 0000000..c35519e
--- /dev/null
+++ b/java/test/TestH5Lbasic.java
@@ -0,0 +1,371 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.ArrayList;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5L_iterate_cb;
+import hdf.hdf5lib.callbacks.H5L_iterate_t;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.structs.H5L_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Lbasic {
+ @Rule public TestName testname = new TestName();
+ private static final String H5_FILE = "h5ex_g_iterate.hdf";
+ long H5fid = -1;
+
+ @Before
+ public void openH5file()
+ throws HDF5LibraryException, NullPointerException {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ try {
+ H5fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDONLY,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Fopen: openH5file: " + err);
+ }
+ }
+
+ @After
+ public void closeH5file() throws HDF5LibraryException {
+ if (H5fid > 0) {
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ }
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Lexists() {
+ boolean link_exists = false;
+ try {
+ link_exists = H5.H5Lexists(H5fid, "None", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lexists: " + err);
+ }
+ assertFalse("H5Lexists ",link_exists);
+ try {
+ link_exists = H5.H5Lexists(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lexists: " + err);
+ }
+ assertTrue("H5Lexists ",link_exists);
+ try {
+ link_exists = H5.H5Lexists(H5fid, "G1/DS2", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lexists: " + err);
+ }
+ assertTrue("H5Lexists ",link_exists);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lget_info_not_exist() throws Throwable {
+ H5.H5Lget_info(H5fid, "None", HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Lget_info_dataset() {
+ H5L_info_t link_info = null;
+ try {
+ link_info = H5.H5Lget_info(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info: " + err);
+ }
+ assertFalse("H5Lget_info ",link_info==null);
+ assertTrue("H5Lget_info link type",link_info.type==HDF5Constants.H5L_TYPE_HARD);
+ }
+
+ @Test
+ public void testH5Lget_info_hardlink() {
+ H5L_info_t link_info = null;
+ try {
+ link_info = H5.H5Lget_info(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info: " + err);
+ }
+ assertFalse("H5Lget_info ",link_info==null);
+ assertTrue("H5Lget_info link type",link_info.type==HDF5Constants.H5L_TYPE_HARD);
+ assertTrue("Link Address ",link_info.address_val_size>0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lget_info_by_idx_name_not_exist_name() throws Throwable {
+ H5.H5Lget_info_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lget_info_by_idx_name_not_exist_create() throws Throwable {
+ H5.H5Lget_info_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lget_info_by_idx_not_exist_name() throws Throwable {
+ H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 5, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lget_info_by_idx_not_exist_create() throws Throwable {
+ H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 5, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Lget_info_by_idx_n0() {
+ H5L_info_t link_info = null;
+ H5L_info_t link_info2 = null;
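+ // Name-ordered index 0 is the link "DS1"; the by-index info should agree
+ // with a direct H5Lget_info lookup on the same link.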
+ try {
+ link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info_by_idx: " + err);
+ }
+ assertFalse("H5Lget_info_by_idx ",link_info==null);
+ assertTrue("H5Lget_info_by_idx link type",link_info.type==HDF5Constants.H5L_TYPE_HARD);
+ try {
+ link_info2 = H5.H5Lget_info(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info: " + err);
+ }
+ assertTrue("Link Address ",link_info.address_val_size==link_info2.address_val_size);
+ }
+
+ @Test
+ public void testH5Lget_info_by_idx_n3() {
+ H5L_info_t link_info = null;
+ H5L_info_t link_info2 = null;
+ try {
+ link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 3, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info_by_idx: " + err);
+ }
+ assertFalse("H5Lget_info_by_idx ",link_info==null);
+ assertTrue("H5Lget_info_by_idx link type",link_info.type==HDF5Constants.H5L_TYPE_HARD);
+ try {
+ link_info2 = H5.H5Lget_info(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info: " + err);
+ }
+ assertTrue("Link Address ",link_info.address_val_size==link_info2.address_val_size);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lget_name_by_idx_not_exist() throws Throwable {
+ H5.H5Lget_name_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Lget_name_by_idx_n0() {
+ String link_name = null;
+ try {
+ link_name = H5.H5Lget_name_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_name_by_idx: " + err);
+ }
+ assertFalse("H5Lget_name_by_idx ",link_name==null);
+ assertTrue("Link Name ",link_name.compareTo("DS1")==0);
+ }
+
+ @Test
+ public void testH5Lget_name_by_idx_n3() {
+ String link_name = null;
+ try {
+ link_name = H5.H5Lget_name_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 3, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_name_by_idx: " + err);
+ }
+ assertFalse("H5Lget_name_by_idx ",link_name==null);
+ assertTrue("Link Name ",link_name.compareTo("L1")==0);
+ }
+
+ @Test
+ public void testH5Lvisit() {
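+ // The nested classes implement the visit callback and record every
+ // (name, type) pair handed to it; H5Lvisit descends into G1, so five
+ // links are expected.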
+ class idata {
+ public String link_name = null;
+ public int link_type = -1;
+ idata(String name, int type) {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+ class H5L_iter_data implements H5L_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+ H5L_iterate_t iter_data = new H5L_iter_data();
+ class H5L_iter_callback implements H5L_iterate_cb {
+ public int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+ idata id = new idata(name, info.type);
+ ((H5L_iter_data)op_data).iterdata.add(id);
+ return 0;
+ }
+ }
+ H5L_iterate_cb iter_cb = new H5L_iter_callback();
+ try {
+ H5.H5Lvisit(H5fid, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, iter_cb, iter_data);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lvisit: " + err);
+ }
+ assertFalse("H5Lvisit ",((H5L_iter_data)iter_data).iterdata.isEmpty());
+ assertTrue("H5Lvisit "+((H5L_iter_data)iter_data).iterdata.size(),((H5L_iter_data)iter_data).iterdata.size()==5);
+ assertTrue("H5Lvisit "+(((H5L_iter_data)iter_data).iterdata.get(0)).link_name,(((H5L_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase("DS1")==0);
+ assertTrue("H5Lvisit "+(((H5L_iter_data)iter_data).iterdata.get(1)).link_name,(((H5L_iter_data)iter_data).iterdata.get(1)).link_name.compareToIgnoreCase("DT1")==0);
+ assertTrue("H5Lvisit "+(((H5L_iter_data)iter_data).iterdata.get(2)).link_name,(((H5L_iter_data)iter_data).iterdata.get(2)).link_name.compareToIgnoreCase("G1")==0);
+ assertTrue("H5Lvisit "+(((H5L_iter_data)iter_data).iterdata.get(3)).link_name,(((H5L_iter_data)iter_data).iterdata.get(3)).link_name.compareToIgnoreCase("G1/DS2")==0);
+ assertTrue("H5Lvisit "+(((H5L_iter_data)iter_data).iterdata.get(4)).link_name,(((H5L_iter_data)iter_data).iterdata.get(4)).link_name.compareToIgnoreCase("L1")==0);
+ }
+
+ @Test
+ public void testH5Lvisit_by_name() {
+ class idata {
+ public String link_name = null;
+ public int link_type = -1;
+ idata(String name, int type) {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+ class H5L_iter_data implements H5L_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+ H5L_iterate_t iter_data = new H5L_iter_data();
+ class H5L_iter_callback implements H5L_iterate_cb {
+ public int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+ idata id = new idata(name, info.type);
+ ((H5L_iter_data)op_data).iterdata.add(id);
+ return 0;
+ }
+ }
+ H5L_iterate_cb iter_cb = new H5L_iter_callback();
+ try {
+ H5.H5Lvisit_by_name(H5fid, "G1", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, iter_cb, iter_data, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lvisit_by_name: " + err);
+ }
+ assertFalse("H5Lvisit_by_name ",((H5L_iter_data)iter_data).iterdata.isEmpty());
+ assertTrue("H5Lvisit_by_name "+((H5L_iter_data)iter_data).iterdata.size(),((H5L_iter_data)iter_data).iterdata.size()==1);
+ assertTrue("H5Lvisit_by_name "+(((H5L_iter_data)iter_data).iterdata.get(0)).link_name,(((H5L_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase("DS2")==0);
+ }
+
+ @Test
+ public void testH5Literate() {
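+ // Unlike H5Lvisit above, H5Literate lists only the root group's own
+ // links, so four entries (no G1/DS2) are expected.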
+ class idata {
+ public String link_name = null;
+ public int link_type = -1;
+ idata(String name, int type) {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+ class H5L_iter_data implements H5L_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+ H5L_iterate_t iter_data = new H5L_iter_data();
+ class H5L_iter_callback implements H5L_iterate_cb {
+ public int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+ idata id = new idata(name, info.type);
+ ((H5L_iter_data)op_data).iterdata.add(id);
+ return 0;
+ }
+ }
+ H5L_iterate_cb iter_cb = new H5L_iter_callback();
+ try {
+ H5.H5Literate(H5fid, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0L, iter_cb, iter_data);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Literate: " + err);
+ }
+ assertFalse("H5Literate ",((H5L_iter_data)iter_data).iterdata.isEmpty());
+ assertTrue("H5Literate "+((H5L_iter_data)iter_data).iterdata.size(),((H5L_iter_data)iter_data).iterdata.size()==4);
+ assertTrue("H5Literate "+(((H5L_iter_data)iter_data).iterdata.get(0)).link_name,(((H5L_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase("DS1")==0);
+ assertTrue("H5Literate "+(((H5L_iter_data)iter_data).iterdata.get(1)).link_name,(((H5L_iter_data)iter_data).iterdata.get(1)).link_name.compareToIgnoreCase("DT1")==0);
+ assertTrue("H5Literate "+((idata)((H5L_iter_data)iter_data).iterdata.get(2)).link_name,(((H5L_iter_data)iter_data).iterdata.get(2)).link_name.compareToIgnoreCase("G1")==0);
+ assertTrue("H5Literate "+((idata)((H5L_iter_data)iter_data).iterdata.get(3)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(3)).link_name.compareToIgnoreCase("L1")==0);
+ }
+
+ @Test
+ public void testH5Literate_by_name() {
+ class idata {
+ public String link_name = null;
+ public int link_type = -1;
+ idata(String name, int type) {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+ class H5L_iter_data implements H5L_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+ H5L_iterate_t iter_data = new H5L_iter_data();
+ class H5L_iter_callback implements H5L_iterate_cb {
+ public int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+ idata id = new idata(name, info.type);
+ ((H5L_iter_data)op_data).iterdata.add(id);
+ return 0;
+ }
+ }
+ H5L_iterate_cb iter_cb = new H5L_iter_callback();
+ try {
+ H5.H5Literate_by_name(H5fid, "G1", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0L, iter_cb, iter_data, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Literate_by_name: " + err);
+ }
+ assertFalse("H5Literate_by_name ",((H5L_iter_data)iter_data).iterdata.isEmpty());
+ assertTrue("H5Literate_by_name "+((H5L_iter_data)iter_data).iterdata.size(),((H5L_iter_data)iter_data).iterdata.size()==1);
+ assertTrue("H5Literate_by_name "+((idata)((H5L_iter_data)iter_data).iterdata.get(0)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase("DS2")==0);
+ }
+
+}
diff --git a/java/test/TestH5Lcreate.java b/java/test/TestH5Lcreate.java
new file mode 100644
index 0000000..2fbd9e3
--- /dev/null
+++ b/java/test/TestH5Lcreate.java
@@ -0,0 +1,819 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.util.ArrayList;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5L_iterate_cb;
+import hdf.hdf5lib.callbacks.H5L_iterate_t;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.structs.H5L_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
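+/**
+ * Tests that create, copy, move and delete hard, soft and external links in a
+ * scratch file, test.h5, built fresh for each test with link creation order
+ * tracked and indexed; external-link cases point into the example file
+ * h5ex_g_iterate.hdf.
+ */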
+public class TestH5Lcreate {
+ @Rule public TestName testname = new TestName();
+ private static final String H5_EXTFILE = "h5ex_g_iterate.hdf";
+ private static final String H5_FILE = "test.h5";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
+ long H5fcpl = -1;
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5did1 = -1;
+ long H5did2 = -1;
+ long H5gcpl = -1;
+ long H5gid = -1;
+ long[] H5dims = { DIM_X, DIM_Y };
+
+ private final void _deleteFile(String filename) {
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {
+ file.delete();
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ private final long _createDataset(long fid, long dsid, String name, long dapl) {
+ long did = -1;
+ try {
+ did = H5.H5Dcreate(fid, name,
+ HDF5Constants.H5T_STD_I32BE, dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5L._createDataset: ",did > 0);
+
+ return did;
+ }
+
+ private final long _createGroup(long fid, String name) {
+ long gid = -1;
+ try {
+ H5gcpl = HDF5Constants.H5P_DEFAULT;
+ gid = H5.H5Gcreate(fid, name, HDF5Constants.H5P_DEFAULT,
+ H5gcpl, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Gcreate: " + err);
+ }
+ assertTrue("TestH5L._createGroup: ",gid > 0);
+
+ return gid;
+ }
+
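+ // The _create*Link helpers create a link of the requested type, flush the
+ // file, and assert via H5Lexists that the link is present before a test
+ // relies on it.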
+ private final void _createHardLink(long fid, long cid, String curname, long did, String dstname, long lcpl, long lapl) {
+ boolean link_exists = false;
+ try {
+ H5.H5Lcreate_hard(cid, curname, did, dstname, lcpl, lapl);
+ H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ link_exists = H5.H5Lexists(did, dstname, lapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lcreate_hard: " + err);
+ }
+ assertTrue("TestH5L._createHardLink ", link_exists);
+ }
+
+ private final void _createSoftLink(long fid, String curname, long did, String dstname, long lcpl, long lapl) {
+ boolean link_exists = false;
+ try {
+ H5.H5Lcreate_soft(curname, did, dstname, lcpl, lapl);
+ H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ link_exists = H5.H5Lexists(did, dstname, lapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lcreate_soft: " + err);
+ }
+ assertTrue("TestH5L._createSoftLink ", link_exists);
+ }
+
+ private final void _createExternalLink(long fid, String ext_filename, String curname, long did, String dstname, long lcpl, long lapl) {
+ boolean link_exists = false;
+ try {
+ H5.H5Lcreate_external(ext_filename, curname, did, dstname, lcpl, lapl);
+ H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ link_exists = H5.H5Lexists(did, dstname, lapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lcreate_external: " + err);
+ }
+ assertTrue("TestH5L._createExternalLink ", link_exists);
+ }
+
+ @Before
+ public void createH5file()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ try {
+ H5fcpl = H5.H5Pcreate(HDF5Constants.H5P_FILE_CREATE);
+ H5.H5Pset_link_creation_order(H5fcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ H5fcpl, HDF5Constants.H5P_DEFAULT);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ H5did1 = _createDataset(H5fid, H5dsid, "DS1", HDF5Constants.H5P_DEFAULT);
+ H5gid = _createGroup(H5fid, "/G1");
+ H5did2 = _createDataset(H5gid, H5dsid, "DS2", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5L.createH5file: " + err);
+ }
+ assertTrue("TestH5L.createH5file: H5.H5Fcreate: ",H5fid > 0);
+ assertTrue("TestH5L.createH5file: H5.H5Screate_simple: ",H5dsid > 0);
+ assertTrue("TestH5L.createH5file: H5.H5Gcreate: ",H5gid > 0);
+
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5gid > 0)
+ try {H5.H5Gclose(H5gid);} catch (Exception ex) {}
+ if (H5gcpl > 0)
+ try {H5.H5Pclose(H5gcpl);} catch (Exception ex) {}
+ if (H5did2 > 0)
+ try {H5.H5Dclose(H5did2);} catch (Exception ex) {}
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5did1 > 0)
+ try {H5.H5Dclose(H5did1);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ if (H5fcpl > 0)
+ try {H5.H5Pclose(H5fcpl);} catch (Exception ex) {}
+
+ _deleteFile(H5_FILE);
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Lget_info_by_idx_n0_create() {
+ H5L_info_t link_info = null;
+ try {
+ int order = H5.H5Pget_link_creation_order(H5fcpl);
+ assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info_by_idx_n0_create:H5Pget_link_creation_order " + err);
+ }
+ try {
+ link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info_by_idx: " + err);
+ }
+ assertFalse("H5Lget_info_by_idx ", link_info==null);
+ assertTrue("H5Lget_info_by_idx link type", link_info.type==HDF5Constants.H5L_TYPE_HARD);
+ }
+
+ @Test
+ public void testH5Lget_info_by_idx_n1_create() {
+ H5L_info_t link_info = null;
+ try {
+ int order = H5.H5Pget_link_creation_order(H5fcpl);
+ assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info_by_idx_n1_create:H5Pget_link_creation_order " + err);
+ }
+ try {
+ link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 1, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info_by_idx: " + err);
+ }
+ assertFalse("H5Lget_info_by_idx ", link_info==null);
+ assertTrue("H5Lget_info_by_idx link type", link_info.type==HDF5Constants.H5L_TYPE_HARD);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lcreate_hard_cur_not_exists() throws Throwable {
+ H5.H5Lcreate_hard(H5fid, "None", H5fid, "DS1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Lcreate_hard() {
+ try {
+ H5.H5Lcreate_hard(H5fid, "DS1", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ boolean link_exists = H5.H5Lexists(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5Lcreate_hard:H5Lexists ", link_exists);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lexists: " + err);
+ }
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lcreate_hard_dst_link_exists() throws Throwable {
+ _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Lcreate_hard(H5fid, "L1", H5fid, "/G1/DS2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Ldelete_hard_link() {
+ _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ H5.H5Ldelete(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ boolean link_exists = H5.H5Lexists(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ assertFalse("testH5Lcreate_hard:H5Lexists ", link_exists);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lexists: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Lcreate_soft() {
+ try {
+ H5.H5Lcreate_soft("DS1", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ boolean link_exists = H5.H5Lexists(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5Lcreate_soft:H5Lexists ", link_exists);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lexists: " + err);
+ }
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lcreate_soft_dst_link_exists() throws Throwable {
+ _createSoftLink(H5fid, "/G1/DS2", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Lcreate_soft("L1", H5fid, "/G1/DS2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Ldelete_soft_link() {
+ _createSoftLink(H5fid, "/G1/DS2", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ H5.H5Ldelete(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ boolean link_exists = H5.H5Lexists(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ assertFalse("testH5Lcreate_soft:H5Lexists ", link_exists);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lexists: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Lget_info_softlink() {
+ H5L_info_t link_info = null;
+ _createSoftLink(H5fid, "/G1/DS2", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ link_info = H5.H5Lget_info(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info: " + err);
+ }
+ assertFalse("H5Lget_info ", link_info==null);
+ assertTrue("H5Lget_info link type", link_info.type==HDF5Constants.H5L_TYPE_SOFT);
+ assertTrue("Link Address ", link_info.address_val_size>0);
+ }
+
+ @Test
+ public void testH5Lget_value_soft() {
+ String[] link_value = {null, null};
+ int link_type = -1;
+
+ _createSoftLink(H5fid, "/G1/DS2", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ link_type = H5.H5Lget_value(H5fid, "L1", link_value, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_value: " + err);
+ }
+ assertTrue("Link Type", link_type == HDF5Constants.H5L_TYPE_SOFT);
+ assertFalse("H5Lget_value ", link_value[0]==null);
+ assertTrue("Link Value ", link_value[0].compareTo("/G1/DS2")==0);
+ }
+
+ @Test
+ public void testH5Lcreate_soft_dangle() {
+ try {
+ H5.H5Lcreate_soft("DS3", H5fid, "L2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ boolean link_exists = H5.H5Lexists(H5fid, "L2", HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5Lcreate_soft:H5Lexists ", link_exists);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lexists: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Ldelete_soft_link_dangle() {
+ _createSoftLink(H5fid, "DS3", H5fid, "L2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ H5.H5Ldelete(H5fid, "L2", HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ boolean link_exists = H5.H5Lexists(H5fid, "L2", HDF5Constants.H5P_DEFAULT);
+ assertFalse("testH5Lcreate_soft:H5Lexists ", link_exists);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lexists: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Lget_info_softlink_dangle() {
+ H5L_info_t link_info = null;
+ _createSoftLink(H5fid, "DS3", H5fid, "L2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ link_info = H5.H5Lget_info(H5fid, "L2", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info: " + err);
+ }
+ assertFalse("H5Lget_info ", link_info==null);
+ assertTrue("H5Lget_info link type", link_info.type==HDF5Constants.H5L_TYPE_SOFT);
+ assertTrue("Link Address ", link_info.address_val_size>0);
+ }
+
+ @Test
+ public void testH5Lget_value_dangle() {
+ String[] link_value = {null,null};
+ int link_type = -1;
+
+ _createSoftLink(H5fid, "DS3", H5fid, "L2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ link_type = H5.H5Lget_value(H5fid, "L2", link_value, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_value: " + err);
+ }
+ assertTrue("Link Type", link_type == HDF5Constants.H5L_TYPE_SOFT);
+ assertFalse("H5Lget_value ", link_value[0]==null);
+ assertTrue("Link Value ", link_value[0].compareTo("DS3")==0);
+ }
+
+ @Test
+ public void testH5Lcreate_external() {
+ try {
+ H5.H5Lcreate_external(H5_EXTFILE, "DT1", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ boolean link_exists = H5.H5Lexists(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5Lcreate_external:H5Lexists ", link_exists);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lexists: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Lget_info_externallink() {
+ H5L_info_t link_info = null;
+ _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ link_info = H5.H5Lget_info(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info: " + err);
+ }
+ assertFalse("H5Lget_info ", link_info==null);
+ assertTrue("H5Lget_info link type", link_info.type==HDF5Constants.H5L_TYPE_EXTERNAL);
+ assertTrue("Link Address ", link_info.address_val_size>0);
+ }
+
+ @Test
+ public void testH5Lget_value_external() {
+ String[] link_value = {null,null};
+ int link_type = -1;
+
+ _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ link_type = H5.H5Lget_value(H5fid, "L1", link_value, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_value: " + err);
+ }
+ assertTrue("Link Type", link_type == HDF5Constants.H5L_TYPE_EXTERNAL);
+ assertFalse("H5Lget_value ", link_value[0]==null);
+ assertFalse("H5Lget_value ", link_value[1]==null);
+ assertTrue("Link Value ", link_value[0].compareTo("DT1")==0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lcopy_cur_not_exists() throws Throwable {
+ H5.H5Lcopy(H5fid, "None", H5fid, "DS1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Lcopy() {
+ try {
+ H5.H5Lcopy(H5fid, "DS1", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ boolean link_exists = H5.H5Lexists(H5fid, "CPY1", HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5Lcopy:H5Lexists ", link_exists);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Lcopy:H5Lexists: " + err);
+ }
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lcopy_dst_link_exists() throws Throwable {
+ _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Lcopy(H5fid, "CPY1", H5fid, "/G1/DS2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lmove_cur_not_exists() throws Throwable {
+ H5.H5Lmove(H5fid, "None", H5fid, "DS1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Lmove() {
+ try {
+ H5.H5Lmove(H5fid, "DS1", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ boolean link_exists = H5.H5Lexists(H5fid, "CPY1", HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5Lmove:H5Lexists ", link_exists);
+ link_exists = H5.H5Lexists(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ assertFalse("testH5Lmove:H5Lexists ", link_exists);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Lmove:H5Lexists: " + err);
+ }
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lmove_dst_link_exists() throws Throwable {
+ _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Lmove(H5fid, "CPY1", H5fid, "/G1/DS2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lget_value_by_idx_not_exist_name() throws Throwable {
+ String[] link_value = {null,null};
+ H5.H5Lget_value_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, link_value, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lget_value_by_idx_not_exist_create() throws Throwable {
+ String[] link_value = {null,null};
+ H5.H5Lget_value_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, link_value, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Lget_value_by_idx_n2_name() {
+ H5L_info_t link_info = null;
+ String[] link_value = {null,null};
+ int link_type = -1;
+
+ _createSoftLink(H5fid, "/G1/DS2", H5fid, "LS", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info_by_idx: " + err);
+ }
+ assertFalse("testH5Lget_value_by_idx_n2 ",link_info==null);
+ assertTrue("testH5Lget_value_by_idx_n2 link type", link_info.type==HDF5Constants.H5L_TYPE_SOFT);
+ try {
+ link_type = H5.H5Lget_value_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, link_value, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_value_by_idx: " + err);
+ }
+ assertTrue("Link Type", link_type == HDF5Constants.H5L_TYPE_SOFT);
+ assertFalse("testH5Lget_value_by_idx_n2 ", link_value[0]==null);
+ assertTrue("testH5Lget_value_by_idx_n2 Link Value ", link_value[0].compareTo("/G1/DS2")==0);
+ }
+
+ @Test
+ public void testH5Lget_value_by_idx_n2_create() {
+ H5L_info_t link_info = null;
+ String[] link_value = {null,null};
+ int link_type = -1;
+
+ try {
+ int order = H5.H5Pget_link_creation_order(H5fcpl);
+ assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_value_by_idx_n2_create: H5Pget_link_creation_order " + err);
+ }
+ _createSoftLink(H5fid, "/G1/DS2", H5fid, "LS", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info_by_idx: " + err);
+ }
+ assertFalse("testH5Lget_value_by_idx_n2 ", link_info==null);
+ assertTrue("testH5Lget_value_by_idx_n2 link type", link_info.type==HDF5Constants.H5L_TYPE_SOFT);
+ try {
+ link_type = H5.H5Lget_value_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 2, link_value, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_value_by_idx: " + err);
+ }
+ assertTrue("Link Type", link_type == HDF5Constants.H5L_TYPE_SOFT);
+ assertFalse("testH5Lget_value_by_idx_n2 ", link_value[0]==null);
+ assertTrue("testH5Lget_value_by_idx_n2 Link Value ", link_value[0].compareTo("/G1/DS2")==0);
+ }
+
+ @Test
+ public void testH5Lget_value_by_idx_external_name() {
+ H5L_info_t link_info = null;
+ String[] link_value = {null,null};
+ int link_type = -1;
+
+ _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "LE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info_by_idx: " + err);
+ }
+ assertFalse("testH5Lget_value_by_idx_ext ", link_info==null);
+ assertTrue("testH5Lget_value_by_idx_ext link type "+link_info.type, link_info.type==HDF5Constants.H5L_TYPE_EXTERNAL);
+ try {
+ link_type = H5.H5Lget_value_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, link_value, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_value_by_idx: " + err);
+ }
+ assertTrue("Link Type", link_type == HDF5Constants.H5L_TYPE_EXTERNAL);
+ assertFalse("testH5Lget_value_by_idx_ext ", link_value[0]==null);
+ assertFalse("testH5Lget_value_by_idx_ext ", link_value[1]==null);
+ assertTrue("testH5Lget_value_by_idx_ext Link Value ", link_value[0].compareTo("DT1")==0);
+ }
+
+ @Test
+ public void testH5Lget_value_by_idx_external_create() {
+ H5L_info_t link_info = null;
+ String[] link_value = {null,null};
+ int link_type = -1;
+
+ _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "LE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info_by_idx: " + err);
+ }
+ assertFalse("testH5Lget_value_by_idx_ext ", link_info==null);
+ assertTrue("testH5Lget_value_by_idx_ext link type "+link_info.type, link_info.type==HDF5Constants.H5L_TYPE_EXTERNAL);
+ try {
+ link_type = H5.H5Lget_value_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 2, link_value, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_value_by_idx: " + err);
+ }
+ assertTrue("Link Type", link_type == HDF5Constants.H5L_TYPE_EXTERNAL);
+ assertFalse("testH5Lget_value_by_idx_ext ", link_value[0]==null);
+ assertFalse("testH5Lget_value_by_idx_ext ", link_value[1]==null);
+ assertTrue("testH5Lget_value_by_idx_ext Link Value ", link_value[0].compareTo("DT1")==0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Ldelete_by_idx_not_exist_name() throws Throwable {
+ H5.H5Ldelete_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Ldelete_by_idx_not_exist_create() throws Throwable {
+ H5.H5Ldelete_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Ldelete_by_idx_n2_name() {
+ H5L_info_t link_info = null;
+ _createSoftLink(H5fid, "/G1/DS2", H5fid, "LS", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info_by_idx: " + err);
+ }
+ assertFalse("testH5Ldelete_by_idx_n2 ", link_info==null);
+ assertTrue("testH5Ldelete_by_idx_n2 link type", link_info.type==HDF5Constants.H5L_TYPE_SOFT);
+ try {
+ H5.H5Ldelete_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Ldelete_by_idx: " + err);
+ }
+ try {
+ link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (HDF5LibraryException err) {
+ link_info = null;
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Ldelete_by_idx: " + err);
+ }
+ assertTrue("testH5Ldelete_by_idx_n2 ",link_info==null);
+ }
+
+ @Test
+ public void testH5Ldelete_by_idx_n2_create() {
+ H5L_info_t link_info = null;
+ _createSoftLink(H5fid, "/G1/DS2", H5fid, "LS", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lget_info_by_idx: " + err);
+ }
+ assertFalse("testH5Ldelete_by_idx_n2 ", link_info==null);
+ assertTrue("testH5Ldelete_by_idx_n2 link type", link_info.type==HDF5Constants.H5L_TYPE_SOFT);
+ try {
+ H5.H5Ldelete_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Ldelete_by_idx: " + err);
+ }
+ try {
+ link_info = H5.H5Lget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 2, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (HDF5LibraryException err) {
+ link_info = null;
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Ldelete_by_idx: " + err);
+ }
+ assertTrue("testH5Ldelete_by_idx_n2 ",link_info==null);
+ }
+
+ @Test
+ public void testH5Lvisit_create() {
+ try {
+ int order = H5.H5Pget_link_creation_order(H5fcpl);
+ assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lvisit_create: H5Pget_link_creation_order " + err);
+ }
+
+ _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "LE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ _createSoftLink(H5fid, "/G1/DS2", H5fid, "LS", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
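+ // With H5_INDEX_CRT_ORDER the visit reports links in creation order:
+ // DS1, G1 and G1/DS2 from the fixture, then CPY1, LE and LS created above.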
+ class idata {
+ public String link_name = null;
+ public int link_type = -1;
+ idata(String name, int type) {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+ class H5L_iter_data implements H5L_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+ H5L_iterate_t iter_data = new H5L_iter_data();
+ class H5L_iter_callback implements H5L_iterate_cb {
+ public int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+ idata id = new idata(name, info.type);
+ ((H5L_iter_data)op_data).iterdata.add(id);
+ return 0;
+ }
+ }
+ H5L_iterate_cb iter_cb = new H5L_iter_callback();
+ try {
+ H5.H5Lvisit(H5fid, HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, iter_cb, iter_data);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lvisit: " + err);
+ }
+ assertFalse("H5Lvisit ",((H5L_iter_data)iter_data).iterdata.isEmpty());
+ assertTrue("H5Lvisit "+((H5L_iter_data)iter_data).iterdata.size(),((H5L_iter_data)iter_data).iterdata.size()==6);
+ assertTrue("H5Lvisit "+((idata)((H5L_iter_data)iter_data).iterdata.get(0)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase("DS1")==0);
+ assertTrue("H5Lvisit "+((idata)((H5L_iter_data)iter_data).iterdata.get(1)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(1)).link_name.compareToIgnoreCase("G1")==0);
+ assertTrue("H5Lvisit "+((idata)((H5L_iter_data)iter_data).iterdata.get(2)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(2)).link_name.compareToIgnoreCase("G1/DS2")==0);
+ assertTrue("H5Lvisit "+((idata)((H5L_iter_data)iter_data).iterdata.get(3)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(3)).link_name.compareToIgnoreCase("CPY1")==0);
+ assertTrue("H5Lvisit "+((idata)((H5L_iter_data)iter_data).iterdata.get(4)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(4)).link_name.compareToIgnoreCase("LE")==0);
+ assertTrue("H5Lvisit "+((idata)((H5L_iter_data)iter_data).iterdata.get(5)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(5)).link_name.compareToIgnoreCase("LS")==0);
+ }
+
+ @Test
+ public void testH5Literate_create() {
+ try {
+ int order = H5.H5Pget_link_creation_order(H5fcpl);
+ assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Literate_create: H5Pget_link_creation_order " + err);
+ }
+
+ _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "LE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ _createSoftLink(H5fid, "/G1/DS2", H5fid, "LS", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
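+ // H5Literate stays in the root group, so G1/DS2 is not reported and the
+ // five root links appear in creation order.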
+ class idata {
+ public String link_name = null;
+ public int link_type = -1;
+ idata(String name, int type) {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+ class H5L_iter_data implements H5L_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+ H5L_iterate_t iter_data = new H5L_iter_data();
+ class H5L_iter_callback implements H5L_iterate_cb {
+ public int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data) {
+ idata id = new idata(name, info.type);
+ ((H5L_iter_data)op_data).iterdata.add(id);
+ return 0;
+ }
+ }
+ H5L_iterate_cb iter_cb = new H5L_iter_callback();
+ try {
+ H5.H5Literate(H5fid, HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, iter_cb, iter_data);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Literate: " + err);
+ }
+ assertFalse("H5Literate ",((H5L_iter_data)iter_data).iterdata.isEmpty());
+ assertTrue("H5Literate "+((H5L_iter_data)iter_data).iterdata.size(),((H5L_iter_data)iter_data).iterdata.size()==5);
+ assertTrue("H5Literate "+((idata)((H5L_iter_data)iter_data).iterdata.get(0)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase("DS1")==0);
+ assertTrue("H5Literate "+((idata)((H5L_iter_data)iter_data).iterdata.get(1)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(1)).link_name.compareToIgnoreCase("G1")==0);
+ assertTrue("H5Literate "+((idata)((H5L_iter_data)iter_data).iterdata.get(2)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(2)).link_name.compareToIgnoreCase("CPY1")==0);
+ assertTrue("H5Literate "+((idata)((H5L_iter_data)iter_data).iterdata.get(3)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(3)).link_name.compareToIgnoreCase("LE")==0);
+ assertTrue("H5Literate "+((idata)((H5L_iter_data)iter_data).iterdata.get(4)).link_name,((idata)((H5L_iter_data)iter_data).iterdata.get(4)).link_name.compareToIgnoreCase("LS")==0);
+ }
+
+}
diff --git a/java/test/TestH5Lparams.java b/java/test/TestH5Lparams.java
new file mode 100644
index 0000000..9a2c204
--- /dev/null
+++ b/java/test/TestH5Lparams.java
@@ -0,0 +1,231 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertTrue;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
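+/**
+ * Argument checks for the H5L wrappers: each call is made with an invalid
+ * identifier or a null name and is expected to throw HDF5LibraryException or
+ * NullPointerException without touching any file.
+ */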
+public class TestH5Lparams {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void checkOpenIDs() {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ }
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lget_value_invalid() throws Throwable {
+ H5.H5Lget_value(-1, "Bogus", null, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lget_value_null() throws Throwable {
+ H5.H5Lget_value(-1, null, null, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lexists_invalid() throws Throwable {
+ H5.H5Lexists(-1, "Bogus", -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lexists_null() throws Throwable {
+ H5.H5Lexists(-1, null, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lget_info_invalid() throws Throwable {
+ H5.H5Lget_info(-1, "Bogus", -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lget_info_null() throws Throwable {
+ H5.H5Lget_info(-1, null, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lget_info_by_idx_invalid() throws Throwable {
+ H5.H5Lget_info_by_idx(-1, "Bogus", -1, -1, -1L, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lget_info_by_idx_null() throws Throwable {
+ H5.H5Lget_info_by_idx(-1, null, 0, 0, 0L, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lget_name_by_idx_invalid() throws Throwable {
+ H5.H5Lget_name_by_idx(-1, "Bogus", -1, -1, -1L, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lget_name_by_idx_null() throws Throwable {
+ H5.H5Lget_name_by_idx(-1, null, 0, 0, 0L, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lcreate_hard_invalid() throws Throwable {
+ H5.H5Lcreate_hard(-1, "Bogus", -1, "Bogus", -1, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lcreate_hard_null_current() throws Throwable {
+ H5.H5Lcreate_hard(-1, null, 0, "Bogus", 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lcreate_hard_null_dest() throws Throwable {
+ H5.H5Lcreate_hard(-1, "Bogus", 0, null, 0, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Ldelete_invalid() throws Throwable {
+ H5.H5Ldelete(-1, "Bogus", -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Ldelete_null() throws Throwable {
+ H5.H5Ldelete(-1, null, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lcreate_soft_invalid() throws Throwable {
+ H5.H5Lcreate_soft( "Bogus", -1, "Bogus", -1, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lcreate_soft_null_current() throws Throwable {
+ H5.H5Lcreate_soft(null, 0, "Bogus", 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lcreate_soft_null_dest() throws Throwable {
+ H5.H5Lcreate_soft("Bogus", 0, null, 0, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lcreate_external_invalid() throws Throwable {
+ H5.H5Lcreate_external("PathToFile", "Bogus", -1, "Bogus", -1, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lcreate_external_null_file() throws Throwable {
+ H5.H5Lcreate_external(null, "Bogus", 0, "Bogus", 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lcreate_external_null_current() throws Throwable {
+ H5.H5Lcreate_external("PathToFile", null, 0, "Bogus", 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lcreate_external_null_dest() throws Throwable {
+ H5.H5Lcreate_external("PathToFile", "Bogus", 0, null, 0, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lcopy_invalid() throws Throwable {
+ H5.H5Lcopy(-1, "Bogus", -1, "Bogus", -1, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lcopy_null_current() throws Throwable {
+ H5.H5Lcopy(-1, null, 0, "Bogus", 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lcopy_null_dest() throws Throwable {
+ H5.H5Lcopy(-1, "Bogus", 0, null, 0, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lmove_invalid() throws Throwable {
+ H5.H5Lmove(-1, "Bogus", -1, "Bogus", -1, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lmove_null_current() throws Throwable {
+ H5.H5Lmove(-1, null, 0, "Bogus", 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lmove_null_dest() throws Throwable {
+ H5.H5Lmove(-1, "Bogus", 0, null, 0, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Lget_value_by_idx_invalid() throws Throwable {
+ H5.H5Lget_value_by_idx(-1, "Bogus", -1, -1, -1L, null, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lget_value_by_idx_null() throws Throwable {
+ H5.H5Lget_value_by_idx(-1, null, 0, 0, 0L, null, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Ldelete_by_idx_invalid() throws Throwable {
+ H5.H5Ldelete_by_idx(-1, "Bogus", -1, -1, -1L, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Ldelete_by_idx_null() throws Throwable {
+ H5.H5Ldelete_by_idx(-1, null, 0, 0, 0L, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lvisit_null() throws Throwable {
+ H5.H5Lvisit(-1, -1, -1, null, null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lvisit_by_name_nullname() throws Throwable {
+ H5.H5Lvisit_by_name(-1, null, -1, -1, null, null, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Lvisit_by_name_null() throws Throwable {
+ H5.H5Lvisit_by_name(-1, "Bogus", -1, -1, null, null, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Literate_null() throws Throwable {
+ H5.H5Literate(-1, -1, -1, -1, null, null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Literate_by_name_nullname() throws Throwable {
+ H5.H5Literate_by_name(-1, null, -1, -1, -1, null, null, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Literate_by_name_null() throws Throwable {
+ H5.H5Literate_by_name(-1, "Bogus", -1, -1, -1, null, null, -1);
+ }
+
+}
diff --git a/java/test/TestH5Obasic.java b/java/test/TestH5Obasic.java
new file mode 100644
index 0000000..b564089
--- /dev/null
+++ b/java/test/TestH5Obasic.java
@@ -0,0 +1,488 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.ArrayList;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5O_iterate_cb;
+import hdf.hdf5lib.callbacks.H5O_iterate_t;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.structs.H5O_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
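+/**
+ * Read-only checks of the H5O object routines against the example file
+ * h5ex_g_iterate.hdf; object addresses cached in the H5la_* fields are used
+ * to cross-check the by-index lookups.
+ */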
+public class TestH5Obasic {
+ @Rule public TestName testname = new TestName();
+ private static final String H5_FILE = "h5ex_g_iterate.hdf";
+ private static long H5la_ds1 = -1;
+ private static long H5la_l1 = -1;
+ private static long H5la_dt1 = -1;
+ private static long H5la_g1 = -1;
+ long H5fid = -1;
+
+ @Before
+ public void openH5file()
+ throws HDF5LibraryException, NullPointerException {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ try {
+ H5fid = H5.H5Fopen(H5_FILE, HDF5Constants.H5F_ACC_RDONLY,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Fopen: openH5file: " + err);
+ }
+ }
+
+ @After
+ public void closeH5file() throws HDF5LibraryException {
+ if (H5fid > 0) {
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ }
+ System.out.println();
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Oopen_not_exists() throws Throwable {
+ long oid = -1;
+
+ oid = H5.H5Oopen(H5fid, "Never_created", HDF5Constants.H5P_DEFAULT);
+
+ try {H5.H5Oclose(oid);} catch (Exception ex) {}
+ }
+
+ @Test
+ public void testH5Oget_info_dataset() {
+ long oid = -1;
+ H5O_info_t obj_info = null;
+
+ try {
+ oid = H5.H5Oopen(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ obj_info = H5.H5Oget_info(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info: " + err);
+ }
+ assertFalse("H5Oget_info ",obj_info==null);
+ assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+ try {H5.H5Oclose(oid);} catch (Exception ex) {}
+ }
+
+ @Test
+ public void testH5Oget_info_hardlink() {
+ long oid = -1;
+ H5O_info_t obj_info = null;
+ try {
+ oid = H5.H5Oopen(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ obj_info = H5.H5Oget_info(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info: " + err);
+ }
+ assertFalse("H5Oget_info ",obj_info==null);
+ assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+ try {H5.H5Oclose(oid);} catch (Exception ex) {}
+ }
+
+ @Test
+ public void testH5Oget_info_group() {
+ long oid = -1;
+ H5O_info_t obj_info = null;
+ try {
+ oid = H5.H5Oopen(H5fid, "G1", HDF5Constants.H5P_DEFAULT);
+ obj_info = H5.H5Oget_info(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info: " + err);
+ }
+ assertFalse("H5Oget_info ",obj_info==null);
+ assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_GROUP);
+ try {H5.H5Oclose(oid);} catch (Exception ex) {}
+ }
+
+ @Test
+ public void testH5Oget_info_datatype() {
+ long oid = -1;
+ H5O_info_t obj_info = null;
+ try {
+ oid = H5.H5Oopen(H5fid, "DT1", HDF5Constants.H5P_DEFAULT);
+ obj_info = H5.H5Oget_info(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info: " + err);
+ }
+ assertFalse("H5Oget_info ",obj_info==null);
+ assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_NAMED_DATATYPE);
+ try {H5.H5Oclose(oid);} catch (Exception ex) {}
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Oget_info_by_name_not_exist_name() throws Throwable {
+ H5.H5Oget_info_by_name(H5fid, "None", HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Oget_info_by_name_not_exists() throws Throwable {
+ H5.H5Oget_info_by_name(H5fid, "Bogus", HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Oget_info_by_name_dataset() {
+ H5O_info_t obj_info = null;
+
+ try {
+ obj_info = H5.H5Oget_info_by_name(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info: " + err);
+ }
+ assertFalse("H5Oget_info ",obj_info==null);
+ assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+ }
+
+ @Test
+ public void testH5Oget_info_by_name_hardlink() {
+ H5O_info_t obj_info = null;
+ try {
+ obj_info = H5.H5Oget_info_by_name(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info: " + err);
+ }
+ assertFalse("H5Oget_info ",obj_info==null);
+ assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+ }
+
+ @Test
+ public void testH5Oget_info_by_name_group() {
+ H5O_info_t obj_info = null;
+ try {
+ obj_info = H5.H5Oget_info_by_name(H5fid, "G1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info: " + err);
+ }
+ assertFalse("H5Oget_info ",obj_info==null);
+ assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_GROUP);
+ }
+
+ @Test
+ public void testH5Oget_info_by_name_datatype() {
+ H5O_info_t obj_info = null;
+ try {
+ obj_info = H5.H5Oget_info_by_name(H5fid, "DT1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info: " + err);
+ }
+ assertFalse("H5Oget_info ",obj_info==null);
+ assertTrue("H5Oget_info object type",obj_info.type==HDF5Constants.H5O_TYPE_NAMED_DATATYPE);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Oget_info_by_idx_name_not_exist_name() throws Throwable {
+ H5.H5Oget_info_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Oget_info_by_idx_name_not_exist_create() throws Throwable {
+ H5.H5Oget_info_by_idx(H5fid, "None", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Oget_info_by_idx_not_exist_name() throws Throwable {
+ H5.H5Oget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 5, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Oget_info_by_idx_not_exist_create() throws Throwable {
+ H5.H5Oget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 5, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Oget_info_by_idx_n0() {
+ long oid = -1;
+ H5O_info_t obj_info = null;
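+        // Record DS1's address via H5Oget_info, then fetch info for index 0 in name order and check it resolves to the same object.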
+ try {
+ oid = H5.H5Oopen(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ obj_info = H5.H5Oget_info(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oget_info_by_idx_n0:H5.H5Oget_info: " + err);
+ }
+ H5la_ds1 = obj_info.addr;
+ try {H5.H5Oclose(oid);} catch (Exception ex) {}
+ try {
+ obj_info = H5.H5Oget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oget_info_by_idx_n0:H5.H5Oget_info_by_idx: " + err);
+ }
+ assertFalse("testH5Oget_info_by_idx_n0:H5Oget_info_by_idx ",obj_info==null);
+ assertTrue("testH5Oget_info_by_idx_n0:H5Oget_info_by_idx link type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+ assertTrue("testH5Oget_info_by_idx_n0:Link Address ",obj_info.addr==H5la_ds1);
+ }
+
+ @Test
+ public void testH5Oget_info_by_idx_n3() {
+ long oid = -1;
+ H5O_info_t obj_info = null;
+ try {
+ oid = H5.H5Oopen(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ obj_info = H5.H5Oget_info(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oget_info_by_idx_n3:H5.H5Oget_info: " + err);
+ }
+ H5la_l1 = obj_info.addr;
+ try {H5.H5Oclose(oid);} catch (Exception ex) {}
+ try {
+ obj_info = H5.H5Oget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 3, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oget_info_by_idx_n3:H5.H5Oget_info_by_idx: " + err);
+ }
+ assertFalse("testH5Oget_info_by_idx_n3:H5Oget_info_by_idx ",obj_info==null);
+ assertTrue("testH5Oget_info_by_idx_n3:H5Oget_info_by_idx link type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+ assertTrue("testH5Oget_info_by_idx_n3:Link Address ",obj_info.addr==H5la_l1);
+ }
+
+ @Test
+ public void testH5Ovisit() {
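+        // Visit every object in the file; the callback collects each visited object's name and type in iteration order.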
+ class idata {
+ public String link_name = null;
+ public int link_type = -1;
+ idata(String name, int type) {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+ class H5O_iter_data implements H5O_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+ H5O_iterate_t iter_data = new H5O_iter_data();
+ class H5O_iter_callback implements H5O_iterate_cb {
+ public int callback(long group, String name, H5O_info_t info, H5O_iterate_t op_data) {
+ idata id = new idata(name, info.type);
+ ((H5O_iter_data)op_data).iterdata.add(id);
+ return 0;
+ }
+ }
+ H5O_iterate_cb iter_cb = new H5O_iter_callback();
+ try {
+ H5.H5Ovisit(H5fid, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, iter_cb, iter_data);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Ovisit: " + err);
+ }
+ assertFalse("H5Ovisit ",((H5O_iter_data)iter_data).iterdata.isEmpty());
+ assertTrue("H5Ovisit "+((H5O_iter_data)iter_data).iterdata.size(),((H5O_iter_data)iter_data).iterdata.size()==5);
+ assertTrue("H5Ovisit "+(((H5O_iter_data)iter_data).iterdata.get(0)).link_name,(((H5O_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase(".")==0);
+ assertTrue("H5Ovisit "+(((H5O_iter_data)iter_data).iterdata.get(1)).link_name,(((H5O_iter_data)iter_data).iterdata.get(1)).link_name.compareToIgnoreCase("DS1")==0);
+ assertTrue("H5Ovisit "+(((H5O_iter_data)iter_data).iterdata.get(2)).link_name,(((H5O_iter_data)iter_data).iterdata.get(2)).link_name.compareToIgnoreCase("DT1")==0);
+ assertTrue("H5Ovisit "+(((H5O_iter_data)iter_data).iterdata.get(3)).link_name,(((H5O_iter_data)iter_data).iterdata.get(3)).link_name.compareToIgnoreCase("G1")==0);
+ assertTrue("H5Ovisit "+(((H5O_iter_data)iter_data).iterdata.get(4)).link_name,(((H5O_iter_data)iter_data).iterdata.get(4)).link_name.compareToIgnoreCase("G1/DS2")==0);
+// assertTrue("H5Ovisit "+((idata)((H5O_iter_data)iter_data).iterdata.get(5)).link_name,((idata)((H5O_iter_data)iter_data).iterdata.get(5)).link_name.compareToIgnoreCase("L1")==0);
+ }
+
+ @Test
+ public void testH5Ovisit_by_name() {
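+        // Visit only the objects reachable from group G1; the expected entries are "." (G1 itself) and DS2.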
+ class idata {
+ public String link_name = null;
+ public int link_type = -1;
+ idata(String name, int type) {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+ class H5O_iter_data implements H5O_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+ H5O_iterate_t iter_data = new H5O_iter_data();
+ class H5O_iter_callback implements H5O_iterate_cb {
+ public int callback(long group, String name, H5O_info_t info, H5O_iterate_t op_data) {
+ idata id = new idata(name, info.type);
+ ((H5O_iter_data)op_data).iterdata.add(id);
+ return 0;
+ }
+ }
+ H5O_iterate_cb iter_cb = new H5O_iter_callback();
+ try {
+ H5.H5Ovisit_by_name(H5fid, "G1", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, iter_cb, iter_data, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Ovisit_by_name: " + err);
+ }
+ assertFalse("H5Ovisit_by_name ",((H5O_iter_data)iter_data).iterdata.isEmpty());
+ assertTrue("H5Ovisit_by_name "+((H5O_iter_data)iter_data).iterdata.size(),((H5O_iter_data)iter_data).iterdata.size()==2);
+ assertTrue("H5Ovisit_by_name "+(((H5O_iter_data)iter_data).iterdata.get(0)).link_name,(((H5O_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase(".")==0);
+ assertTrue("H5Ovisit_by_name "+(((H5O_iter_data)iter_data).iterdata.get(1)).link_name,(((H5O_iter_data)iter_data).iterdata.get(1)).link_name.compareToIgnoreCase("DS2")==0);
+ }
+
+ @Test
+ public void testH5Oexists_by_name() {
+ boolean name_exists = false;
+ try {
+ name_exists = H5.H5Oexists_by_name(H5fid, "G1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oexists_by_name: " + err);
+ }
+ assertTrue("H5Oexists_by_name ", name_exists);
+ //TODO get dangling link result
+ }
+
+ @Test
+ public void testH5Oopen_by_addr() {
+ long oid = -1;
+ H5O_info_t obj_info = null;
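+        // Look up DS1's address with H5Oget_info, reopen the object by that address, and verify it is the same dataset.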
+ try {
+ try {
+ oid = H5.H5Oopen(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ obj_info = H5.H5Oget_info(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oopen_by_addr: H5.H5Oget_info: " + err);
+ }
+ H5la_ds1 = obj_info.addr;
+ try {H5.H5Oclose(oid);} catch (Exception ex) {}
+ try {
+ oid = H5.H5Oopen_by_addr(H5fid, H5la_ds1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oopen_by_addr: H5.H5Oopen_by_addr: " + err);
+ }
+ try {
+ obj_info = H5.H5Oget_info(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oopen_by_addr: H5.H5Oget_info: " + err);
+ }
+ assertFalse("testH5Oopen_by_addr: H5Oget_info ",obj_info==null);
+ assertTrue("testH5Oopen_by_addr: H5Oget_info link type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+ assertTrue("testH5Oopen_by_addr: Link Address ",obj_info.addr==H5la_ds1);
+ }
+ finally {
+ try{H5.H5Oclose(oid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Oopen_by_idx_n0() {
+ long oid = -1;
+ H5O_info_t obj_info = null;
+ try {
+ try {
+ oid = H5.H5Oopen(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ obj_info = H5.H5Oget_info(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oopen_by_idx_n0: H5.H5Oget_info: " + err);
+ }
+ H5la_ds1 = obj_info.addr;
+ try {H5.H5Oclose(oid);} catch (Exception ex) {}
+ try {
+ oid = H5.H5Oopen_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oopen_by_addr: H5.H5Oopen_by_addr: " + err);
+ }
+ try {
+ obj_info = H5.H5Oget_info(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oopen_by_idx_n0: H5.H5Oget_info_by_idx: " + err);
+ }
+ assertFalse("testH5Oopen_by_idx_n0: H5Oget_info_by_idx ",obj_info==null);
+ assertTrue("testH5Oopen_by_idx_n0: H5Oget_info_by_idx link type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+ assertTrue("testH5Oopen_by_idx_n0: Link Address ",obj_info.addr==H5la_ds1);
+ }
+ finally {
+ try{H5.H5Oclose(oid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Oopen_by_idx_n3() {
+ long oid = -1;
+ H5O_info_t obj_info = null;
+ try {
+ try {
+ oid = H5.H5Oopen(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ obj_info = H5.H5Oget_info(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oopen_by_idx_n3:H5.H5Oget_info: " + err);
+ }
+ H5la_l1 = obj_info.addr;
+ try {H5.H5Oclose(oid);} catch (Exception ex) {}
+ try {
+ oid = H5.H5Oopen_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_INC, 3, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oopen_by_addr: H5.H5Oopen_by_addr: " + err);
+ }
+ try {
+ obj_info = H5.H5Oget_info(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oopen_by_idx_n3:H5.H5Oget_info_by_idx: " + err);
+ }
+ assertFalse("testH5Oopen_by_idx_n3:H5Oget_info_by_idx ",obj_info==null);
+ assertTrue("testH5Oopen_by_idx_n3:H5Oget_info_by_idx link type",obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+ assertTrue("testH5Oopen_by_idx_n3:Link Address ",obj_info.addr==H5la_l1);
+ }
+ finally {
+ try{H5.H5Oclose(oid);} catch (Exception ex) {}
+ }
+ }
+}
diff --git a/java/test/TestH5Ocopy.java b/java/test/TestH5Ocopy.java
new file mode 100644
index 0000000..62dd886
--- /dev/null
+++ b/java/test/TestH5Ocopy.java
@@ -0,0 +1,365 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Ocopy {
+ @Rule public TestName testname = new TestName();
+ private static final String FILENAME = "testRefsattribute.h5";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5did1 = -1;
+ long H5did2 = -1;
+ long H5gcpl = -1;
+ long H5gid = -1;
+ long H5dsid2 = -1;
+ long[] dims = { 2 };
+
+ private final void _deleteFile(String filename) {
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {
+ file.delete();
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ private final long _createDataset(long fid, long dsid, String name, long dapl) {
+ long did = -1;
+ try {
+ did = H5.H5Dcreate(fid, name,
+ HDF5Constants.H5T_STD_I32BE, dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5O._createDataset: ",did >= 0);
+
+ return did;
+ }
+
+ private final long _createGroup(long fid, String name) {
+ long gid = -1;
+ try {
+ H5gcpl = HDF5Constants.H5P_DEFAULT;
+ gid = H5.H5Gcreate(fid, name, HDF5Constants.H5P_DEFAULT,
+ H5gcpl, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Gcreate: " + err);
+ }
+ assertTrue("TestH5O._createGroup: ",gid >= 0);
+
+ return gid;
+ }
+
+ @Before
+ public void createH5file()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
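+        // Test file layout: scalar dataset DS2 at the file root and dataset DS1 inside group /G1.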
+ try {
+ H5fid = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5dsid2 = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ H5did1 = _createDataset(H5fid, H5dsid2, "DS2", HDF5Constants.H5P_DEFAULT);
+ H5dsid = H5.H5Screate_simple(1, dims, null);
+ H5gid = _createGroup(H5fid, "/G1");
+ H5did2 = _createDataset(H5gid, H5dsid, "DS1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5O.createH5file: " + err);
+ }
+ assertTrue("TestH5O.createH5file: H5.H5Fcreate: ",H5fid >= 0);
+ assertTrue("TestH5O.createH5file: H5.H5Screate_simple: ",H5dsid >= 0);
+ assertTrue("TestH5O.createH5file: H5.H5Gcreate: ",H5gid >= 0);
+
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5gid > 0)
+ try {H5.H5Gclose(H5gid);} catch (Exception ex) {}
+ if (H5did2 > 0)
+ try {H5.H5Dclose(H5did2);} catch (Exception ex) {}
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5dsid2 > 0)
+ try {H5.H5Sclose(H5dsid2);} catch (Exception ex) {}
+ if (H5did1 > 0)
+ try {H5.H5Dclose(H5did1);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+
+ _deleteFile(FILENAME);
+ System.out.println();
+ }
+
+ @Test
+ public void testH5OcopyRefsAttr() {
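+        // Store an object reference to DS2 in an attribute on /G1/DS1, then copy the file's root group to "CPYREF" with reference expansion enabled.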
+ long ocp_plist_id = -1;
+        byte[] rbuf0 = null, rbuf1 = null;
+ byte[] dset_data = new byte[16];
+ long attribute_id = -1;
+
+
+ try {
+ rbuf0 = H5.H5Rcreate(H5fid, "/G1", HDF5Constants.H5R_OBJECT, -1);
+ rbuf1 = H5.H5Rcreate(H5fid, "DS2", HDF5Constants.H5R_OBJECT, -1);
+ //System.arraycopy(rbuf0, 0, dset_data, 0, 8);
+ System.arraycopy(rbuf1, 0, dset_data, 8, 8);
+ }
+ catch (Exception ex) {
+ fail("testH5OcopyRefsAttr: H5Rcreate failed");
+ }
+
+ try {
+ attribute_id = H5.H5Acreate(H5did2, "A1", HDF5Constants.H5T_STD_REF_OBJ, H5dsid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5OcopyRefsAttr.H5Acreate: ", attribute_id >= 0);
+ H5.H5Awrite(attribute_id, HDF5Constants.H5T_STD_REF_OBJ, dset_data);
+
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception ex) {
+ fail("testH5OcopyRefsAttr: H5Awrite failed");
+ }
+ finally {
+ try {H5.H5Aclose(attribute_id);} catch (Exception exx) {}
+ }
+
+ try {
+ ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY);
+ assertTrue("testH5OcopyRefsAttr.H5Pcreate: ", ocp_plist_id >= 0);
+ H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG);
+ H5.H5Ocopy(H5fid, ".", H5fid, "CPYREF", ocp_plist_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception ex) {
+ fail("testH5OcopyRefsAttr: H5Ocopy failed");
+ }
+ finally {
+ try {H5.H5Pclose(ocp_plist_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5OcopyRefsDatasettodiffFile() {
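+        // Store an object reference to DS2 in dataset DSREF, then copy the source file's root group into a new file ("copy.h5") with reference expansion enabled.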
+        byte[] rbuf1 = null;
+ byte[] dset_data = new byte[16];
+ long ocp_plist_id = -1;
+ long dataset_id = -1;
+ long H5fid2 = -1;
+
+ try {
+ rbuf1 = H5.H5Rcreate(H5fid, "DS2", HDF5Constants.H5R_OBJECT, -1);
+ System.arraycopy(rbuf1, 0, dset_data, 8, 8);
+
+ dataset_id = H5.H5Dcreate(H5fid, "DSREF",
+ HDF5Constants.H5T_STD_REF_OBJ, H5dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5OcopyRefsDatasettodiffFile.H5Dcreate: ", dataset_id >= 0);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_STD_REF_OBJ,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception ex) {
+ fail("testH5OcopyRefsDatasettodiffFile: create dataset failed");
+ }
+ finally {
+ try {H5.H5Dclose(dataset_id);} catch (Exception exx) {}
+ }
+
+ try {
+ //create new file
+ H5fid2 = H5.H5Fcreate("copy.h5", HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5OcopyRefsDatasettodiffFile.H5Fcreate: ", H5fid2 >= 0);
+ H5.H5Fflush(H5fid2, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Exception ex) {
+ try {H5.H5Fclose(H5fid2);} catch (Exception exx) {}
+ fail("testH5OcopyRefsDatasettodiffFile: H5Fcreate failed");
+ }
+
+ try {
+ //create object copy property list id and set the flags.
+ ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY);
+ assertTrue("testH5OcopyRefsDatasettodiffFile.H5Pcreate: ", ocp_plist_id >= 0);
+ H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG);
+
+ //Perform copy function.
+ H5.H5Ocopy(H5fid, ".", H5fid2, "CPYREFD", ocp_plist_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception ex){
+ ex.printStackTrace();
+ fail("testH5OcopyRefsDatasettodiffFile: H5Ocopy failed");
+ }
+ finally {
+ try {H5.H5Pclose(ocp_plist_id);} catch (Exception ex) {}
+ try {H5.H5Fclose(H5fid2);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5OcopyRefsDatasettosameFile() {
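+        // Write references to /G1 and DS2 into dataset DSREF, copy it within the same file, then read the references back and check the types of the referenced objects.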
+        byte[] rbuf0 = null, rbuf1 = null;
+ byte[] dset_data = new byte[16];
+ long ocp_plist_id = -1;
+ long dataset_id = -1;
+ long did = -1;
+ int obj_type = -1;
+ byte[] read_data = new byte[16];
+
+ try {
+ rbuf0 = H5.H5Rcreate(H5fid, "/G1", HDF5Constants.H5R_OBJECT, -1);
+ rbuf1 = H5.H5Rcreate(H5fid, "DS2", HDF5Constants.H5R_OBJECT, -1);
+ System.arraycopy(rbuf0, 0, dset_data, 0, 8);
+ System.arraycopy(rbuf1, 0, dset_data, 8, 8);
+
+ //Create a dataset and write object references to it.
+ dataset_id = H5.H5Dcreate(H5fid, "DSREF",
+ HDF5Constants.H5T_STD_REF_OBJ, H5dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5OcopyRefsDatasettosameFile.H5Dcreate: ", dataset_id >= 0);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_STD_REF_OBJ,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ //Close the dataset.
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception ex) {
+ try {H5.H5Dclose(dataset_id);} catch (Exception exx) {}
+ fail("testH5OcopyRefsDatasettosameFile: create dataset failed");
+ }
+
+ try {
+ ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY);
+ assertTrue("testH5OcopyRefsDatasettosameFile.H5Pcreate: ", ocp_plist_id >= 0);
+ H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG);
+ }
+ catch (Exception ex) {
+ try {H5.H5Pclose(ocp_plist_id);} catch (Exception exx) {}
+ fail("testH5OcopyRefsDatasettosameFile: H5Pset_copy_object failed");
+ }
+
+ //Perform copy function.
+ try {
+ H5.H5Ocopy(H5fid, "DSREF", H5fid, "CPYREFD", ocp_plist_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch(Exception ex) {
+ try {H5.H5Pclose(ocp_plist_id);} catch (Exception exx) {}
+ fail("testH5OcopyRefsDatasettosameFile: H5Ocopy failed");
+ }
+
+ //Open the dataset that has been copied
+ try {
+ did = H5.H5Dopen(H5fid, "DSREF", HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5OcopyRefsDatasettosameFile.H5Dopen: ", did >= 0);
+ }
+ catch (Exception e) {
+ try {H5.H5Dclose(did);} catch (Exception exx) {}
+ e.printStackTrace();
+ fail("testH5OcopyRefsDatasettosameFile: H5Dopen failed");
+ }
+
+ try {
+ //Read the dataset object references in the read_data buffer.
+ H5.H5Dread(did, HDF5Constants.H5T_STD_REF_OBJ, HDF5Constants.H5S_ALL,HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, read_data);
+ System.arraycopy(read_data, 0, rbuf0, 0, 8);
+ System.arraycopy(read_data, 8, rbuf1, 0, 8);
+
+ //Get the type of object the reference points to.
+ obj_type = H5.H5Rget_obj_type(H5fid, HDF5Constants.H5R_OBJECT, rbuf1);
+ assertEquals(obj_type, HDF5Constants.H5O_TYPE_DATASET);
+
+ obj_type = H5.H5Rget_obj_type(H5fid, HDF5Constants.H5R_OBJECT, rbuf0);
+ assertEquals(obj_type, HDF5Constants.H5O_TYPE_GROUP);
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ finally {
+ try {H5.H5Dclose(did);} catch (Exception ex) {}
+ try {H5.H5Pclose(ocp_plist_id);} catch (Exception ex) {}
+ }
+ }
+
+// @Ignore because of JIRA HDF5-9547
+// @Test(expected = HDF5LibraryException.class)
+// public void testH5OcopyInvalidRef() throws Throwable {
+// final long _pid_ = HDF5Constants.H5P_DEFAULT;
+// long sid = -1;
+// long did = -1;
+// long aid = -1;
+//
+// try {
+// sid = H5.H5Screate_simple(1, new long[] {1}, null);
+// assertTrue("testH5OcopyInvalidRef.H5Screate_simple: ", sid >= 0);
+// did = H5.H5Dcreate(H5fid, "Dataset_with_invalid_Ref", HDF5Constants.H5T_NATIVE_INT, sid, _pid_, _pid_, _pid_);
+// assertTrue("testH5OcopyInvalidRef.H5Dcreate: ", did > 0);
+// aid = H5.H5Acreate(did, "Invalid_Ref", HDF5Constants.H5T_STD_REF_OBJ, sid, _pid_, _pid_);
+// assertTrue("testH5OcopyInvalidRef.H5Acreate: ", aid > 0);
+// H5.H5Awrite(aid, HDF5Constants.H5T_STD_REF_OBJ, new long[]{-1});
+// }
+// catch (Exception ex) {
+// ex.printStackTrace();
+// }
+// finally {
+// try {H5.H5Dclose(did);} catch (Exception exx) {}
+// try {H5.H5Aclose(aid);} catch (Exception exx) {}
+// try {H5.H5Sclose(sid);} catch (Exception exx) {}
+// }
+//
+// long ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY);
+// assertTrue("testH5OcopyInvalidRef.H5Pcreate: ", ocp_plist_id >= 0);
+// H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG);
+// try {
+// H5.H5Ocopy(H5fid, "/Dataset_with_invalid_Ref", H5fid, "/Dataset_with_invalid_Ref_cp", ocp_plist_id, _pid_);
+// }
+// finally {
+// try {H5.H5Pclose(ocp_plist_id);} catch (Exception exx) {}
+// }
+// }
+
+}
diff --git a/java/test/TestH5Ocreate.java b/java/test/TestH5Ocreate.java
new file mode 100644
index 0000000..0edecba
--- /dev/null
+++ b/java/test/TestH5Ocreate.java
@@ -0,0 +1,562 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.util.ArrayList;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5O_iterate_cb;
+import hdf.hdf5lib.callbacks.H5O_iterate_t;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.structs.H5O_info_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Ocreate {
+ @Rule public TestName testname = new TestName();
+ private static final String H5_EXTFILE = "h5ex_g_iterate.hdf";
+ private static final String H5_FILE = "test.h5";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
+ long H5fcpl = -1;
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5did1 = -1;
+ long H5did2 = -1;
+ long H5gcpl = -1;
+ long H5gid = -1;
+ long[] H5dims = { DIM_X, DIM_Y };
+
+ private final void _deleteFile(String filename) {
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {
+ file.delete();
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ private final long _createDataset(long fid, long dsid, String name, long dapl) {
+ long did = -1;
+ try {
+ did = H5.H5Dcreate(fid, name,
+ HDF5Constants.H5T_STD_I32BE, dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5O._createDataset: ",did > 0);
+
+ return did;
+ }
+
+ private final long _createGroup(long fid, String name) {
+ long gid = -1;
+ try {
+ H5gcpl = HDF5Constants.H5P_DEFAULT;
+ gid = H5.H5Gcreate(fid, name, HDF5Constants.H5P_DEFAULT,
+ H5gcpl, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Gcreate: " + err);
+ }
+ assertTrue("TestH5O._createGroup: ",gid > 0);
+
+ return gid;
+ }
+
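+    // Link helpers: each creates a hard, soft, or external link and then confirms the link exists with H5Lexists.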
+ private final void _createHardLink(long fid, long cid, String curname, long did, String dstname, long lcpl, long lapl) {
+ boolean link_exists = false;
+ try {
+ H5.H5Lcreate_hard(cid, curname, did, dstname, lcpl, lapl);
+ H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ link_exists = H5.H5Lexists(did, dstname, lapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lcreate_hard: " + err);
+ }
+ assertTrue("TestH5O._createHardLink ", link_exists);
+ }
+
+ private final void _createSoftLink(long fid, String curname, long did, String dstname, long lcpl, long lapl) {
+ boolean link_exists = false;
+ try {
+ H5.H5Lcreate_soft(curname, did, dstname, lcpl, lapl);
+ H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ link_exists = H5.H5Lexists(did, dstname, lapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lcreate_soft: " + err);
+ }
+ assertTrue("TestH5O._createSoftLink ", link_exists);
+ }
+
+ private final void _createExternalLink(long fid, String ext_filename, String curname, long did, String dstname, long lcpl, long lapl) {
+ boolean link_exists = false;
+ try {
+ H5.H5Lcreate_external(ext_filename, curname, did, dstname, lcpl, lapl);
+ H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ link_exists = H5.H5Lexists(did, dstname, lapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Lcreate_external: " + err);
+ }
+ assertTrue("TestH5O._createExternalLink ", link_exists);
+ }
+
+ @Before
+ public void createH5file()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ try {
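+            // Track and index link creation order so the tests that use H5_INDEX_CRT_ORDER can look objects up by creation index.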
+ H5fcpl = H5.H5Pcreate(HDF5Constants.H5P_FILE_CREATE);
+ H5.H5Pset_link_creation_order(H5fcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ H5fcpl, HDF5Constants.H5P_DEFAULT);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ H5did1 = _createDataset(H5fid, H5dsid, "DS1", HDF5Constants.H5P_DEFAULT);
+ H5gid = _createGroup(H5fid, "/G1");
+ H5did2 = _createDataset(H5gid, H5dsid, "DS2", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5O.createH5file: " + err);
+ }
+ assertTrue("TestH5O.createH5file: H5.H5Fcreate: ",H5fid > 0);
+ assertTrue("TestH5O.createH5file: H5.H5Screate_simple: ",H5dsid > 0);
+ assertTrue("TestH5O.createH5file: H5.H5Gcreate: ",H5gid > 0);
+
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5gid > 0)
+ try {H5.H5Gclose(H5gid);} catch (Exception ex) {}
+ if (H5gcpl > 0)
+ try {H5.H5Pclose(H5gcpl);} catch (Exception ex) {}
+ if (H5did2 > 0)
+ try {H5.H5Dclose(H5did2);} catch (Exception ex) {}
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5did1 > 0)
+ try {H5.H5Dclose(H5did1);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ if (H5fcpl > 0)
+ try {H5.H5Pclose(H5fcpl);} catch (Exception ex) {}
+
+ _deleteFile(H5_FILE);
+ System.out.println();
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Ocopy_cur_not_exists() throws Throwable {
+ H5.H5Ocopy(H5fid, "None", H5fid, "DS1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Ocopy() {
+ try {
+ H5.H5Ocopy(H5fid, "DS1", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ boolean link_exists = H5.H5Lexists(H5fid, "CPY1", HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5Ocopy:H5Lexists ",link_exists);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Ocopy: " + err);
+ }
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Ocopy_dst_link_exists() throws Throwable {
+ _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Ocopy(H5fid, "CPY1", H5fid, "/G1/DS2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Oget_info_by_idx_n0_create() {
+ H5O_info_t obj_info = null;
+ try {
+ int order = H5.H5Pget_link_creation_order(H5fcpl);
+ assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info_by_idx_n0:H5Pget_link_creation_order " + err);
+ }
+ try {
+ obj_info = H5.H5Oget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 0, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info_by_idx: " + err);
+ }
+ assertFalse("H5Oget_info_by_idx ", obj_info==null);
+ assertTrue("H5Oget_info_by_idx link type", obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+ }
+
+ @Test
+ public void testH5Oget_info_by_idx_n1_create() {
+ H5O_info_t obj_info = null;
+ try {
+ int order = H5.H5Pget_link_creation_order(H5fcpl);
+ assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info_by_idx_n1:H5Pget_link_creation_order " + err);
+ }
+ try {
+ obj_info = H5.H5Oget_info_by_idx(H5fid, "/", HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, 1, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info_by_idx: " + err);
+ }
+ assertFalse("H5Oget_info_by_idx ", obj_info==null);
+ assertTrue("H5Oget_info_by_idx link type", obj_info.type==HDF5Constants.H5O_TYPE_GROUP);
+ }
+
+ @Test
+ public void testH5Oget_info_softlink() {
+ H5O_info_t obj_info = null;
+ _createSoftLink(H5fid, "/G1/DS2", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ obj_info = H5.H5Oget_info_by_name(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info: " + err);
+ }
+ assertFalse("H5Oget_info ", obj_info==null);
+ assertTrue("H5Oget_info link type", obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+ assertTrue("Link Address ", obj_info.addr>0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Oget_info_softlink_dangle() throws Throwable {
+ _createSoftLink(H5fid, "DS3", H5fid, "L2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Oget_info_by_name(H5fid, "L2", HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test
+ public void testH5Oget_info_externallink() {
+ H5O_info_t obj_info = null;
+ _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "L1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ obj_info = H5.H5Oget_info_by_name(H5fid, "L1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info: " + err);
+ }
+ assertFalse("H5Oget_info ", obj_info==null);
+ assertTrue("H5Oget_info link type", obj_info.type==HDF5Constants.H5O_TYPE_NAMED_DATATYPE);
+ assertTrue("Link Address ", obj_info.addr>0);
+ }
+
+ @Test
+ public void testH5Olink() {
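+        // Create a second link "CPY1" to the already-open DS1 object with H5Olink, then verify both the original and the new path report a dataset.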
+ long oid = -1;
+ H5O_info_t obj_info = null;
+ H5O_info_t dst_obj_info = null;
+ try {
+ oid = H5.H5Oopen(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ obj_info = H5.H5Oget_info(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info: " + err);
+ }
+ try {
+ H5.H5Olink(oid, H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Olink: " + err);
+ }
+ try {H5.H5Oclose(oid);} catch (Exception ex) {}
+
+ assertFalse("H5Oget_info ", obj_info==null);
+ assertTrue("H5Oget_info object type", obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+
+ try {
+ dst_obj_info = H5.H5Oget_info_by_name(H5fid, "CPY1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_info_by_name: " + err);
+ }
+ assertFalse("H5Oget_info ", dst_obj_info==null);
+ assertTrue("H5Oget_info object type", dst_obj_info.type==HDF5Constants.H5O_TYPE_DATASET);
+ }
+
+ @Test
+ public void testH5Ovisit_create() {
+ try {
+ int order = H5.H5Pget_link_creation_order(H5fcpl);
+ assertTrue("creation order :"+order, order == HDF5Constants.H5P_CRT_ORDER_TRACKED+HDF5Constants.H5P_CRT_ORDER_INDEXED);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Ovisit_create:H5Pget_link_creation_order " + err);
+ }
+
+ _createHardLink(H5fid, H5fid, "/G1/DS2", H5fid, "CPY1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ _createExternalLink(H5fid, H5_EXTFILE, "DT1", H5fid, "LE", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ _createSoftLink(H5fid, "/G1/DS2", H5fid, "LS", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
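+        // H5Ovisit reports each object once and does not follow soft or external links, so only ".", DS1, G1 and G1/DS2 are expected below.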
+
+ class idata {
+ public String link_name = null;
+ public int link_type = -1;
+ idata(String name, int type) {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+ class H5O_iter_data implements H5O_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+ H5O_iterate_t iter_data = new H5O_iter_data();
+ class H5O_iter_callback implements H5O_iterate_cb {
+ public int callback(long group, String name, H5O_info_t info, H5O_iterate_t op_data) {
+ idata id = new idata(name, info.type);
+ ((H5O_iter_data)op_data).iterdata.add(id);
+ return 0;
+ }
+ }
+ H5O_iterate_cb iter_cb = new H5O_iter_callback();
+ try {
+ H5.H5Ovisit(H5fid, HDF5Constants.H5_INDEX_CRT_ORDER, HDF5Constants.H5_ITER_INC, iter_cb, iter_data);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Ovisit: " + err);
+ }
+ assertFalse("H5Ovisit ", ((H5O_iter_data)iter_data).iterdata.isEmpty());
+ assertTrue("H5Ovisit "+((H5O_iter_data)iter_data).iterdata.size(), ((H5O_iter_data)iter_data).iterdata.size()==4);
+ assertTrue("H5Ovisit "+((idata)((H5O_iter_data)iter_data).iterdata.get(0)).link_name, ((idata)((H5O_iter_data)iter_data).iterdata.get(0)).link_name.compareToIgnoreCase(".")==0);
+ assertTrue("H5Ovisit "+((idata)((H5O_iter_data)iter_data).iterdata.get(1)).link_name, ((idata)((H5O_iter_data)iter_data).iterdata.get(1)).link_name.compareToIgnoreCase("DS1")==0);
+ assertTrue("H5Ovisit "+((idata)((H5O_iter_data)iter_data).iterdata.get(2)).link_name, ((idata)((H5O_iter_data)iter_data).iterdata.get(2)).link_name.compareToIgnoreCase("G1")==0);
+ assertTrue("H5Ovisit "+((idata)((H5O_iter_data)iter_data).iterdata.get(3)).link_name, ((idata)((H5O_iter_data)iter_data).iterdata.get(3)).link_name.compareToIgnoreCase("G1/DS2")==0);
+ }
+
+ @Test
+ public void testH5Ocomment() {
+ long oid = -1;
+ String obj_comment = null;
+ try {
+ oid = H5.H5Oopen(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ H5.H5Oset_comment(oid, "Test Comment");
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oset_comment: " + err);
+ }
+ try {
+ obj_comment = H5.H5Oget_comment(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_comment: " + err);
+ }
+ try {H5.H5Oclose(oid);} catch (Exception ex) {}
+ assertFalse("H5Oget_comment: ", obj_comment==null);
+ assertTrue("H5Oget_comment: ", obj_comment.compareTo("Test Comment")==0);
+ }
+
+ @Test
+ public void testH5Ocomment_clear() {
+ long oid = -1;
+ String obj_comment = null;
+ try {
+ oid = H5.H5Oopen(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ H5.H5Oset_comment(oid, "Test Comment");
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oset_comment: " + err);
+ }
+ try {
+ obj_comment = H5.H5Oget_comment(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_comment: " + err);
+ }
+ assertFalse("H5Oget_comment: ", obj_comment==null);
+ assertTrue("H5Oget_comment: ", obj_comment.compareTo("Test Comment")==0);
+ try {
+ H5.H5Oset_comment(oid, null);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oset_comment: " + err);
+ }
+ try {
+ obj_comment = H5.H5Oget_comment(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_comment: " + err);
+ }
+ try {H5.H5Oclose(oid);} catch (Exception ex) {}
+ assertTrue("H5Oget_comment: ", obj_comment==null);
+ }
+
+ @Test
+ public void testH5Ocomment_by_name() {
+ String obj_comment = null;
+ try {
+ H5.H5Oset_comment_by_name(H5fid, "DS1", "Test Comment", HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oset_comment_by_name: " + err);
+ }
+ try {
+ obj_comment = H5.H5Oget_comment_by_name(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_comment_by_name: " + err);
+ }
+ assertFalse("H5Oget_comment_by_name: ", obj_comment==null);
+ assertTrue("H5Oget_comment_by_name: ", obj_comment.compareTo("Test Comment")==0);
+ }
+
+ @Test
+ public void testH5Ocomment_by_name_clear() {
+ String obj_comment = null;
+ try {
+ H5.H5Oset_comment_by_name(H5fid, "DS1", "Test Comment", HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oset_comment_by_name: " + err);
+ }
+ try {
+ obj_comment = H5.H5Oget_comment_by_name(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_comment_by_name: " + err);
+ }
+ assertFalse("H5Oget_comment_by_name: ", obj_comment==null);
+ assertTrue("H5Oget_comment_by_name: ", obj_comment.compareTo("Test Comment")==0);
+ try {
+ H5.H5Oset_comment_by_name(H5fid, "DS1", null, HDF5Constants.H5P_DEFAULT);
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oset_comment_by_name: " + err);
+ }
+ try {
+ obj_comment = H5.H5Oget_comment_by_name(H5fid, "DS1", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Oget_comment_by_name: " + err);
+ }
+ assertTrue("H5Oget_comment_by_name: ", obj_comment==null);
+ }
+
+ @Test
+ public void testH5Oinc_dec_count() {
+ long oid = -1;
+ H5O_info_t obj_info = null;
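+        // Bump G1's object reference count from 1 to 2 with H5Oincr_refcount, then restore it with H5Odecr_refcount, checking rc after each step.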
+ try {
+ try {
+ oid = H5.H5Oopen(H5fid, "G1", HDF5Constants.H5P_DEFAULT);
+ obj_info = H5.H5Oget_info(oid);
+ assertFalse("testH5Oinc_dec_count: H5Oget_info ",obj_info==null);
+ assertTrue("testH5Oinc_dec_count: H5Oget_info reference count",obj_info.rc==1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oinc_dec_count: H5.H5Oget_info: " + err);
+ }
+ try {
+ H5.H5Oincr_refcount(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oinc_dec_count: H5.H5Oincr_refcount: " + err);
+ }
+ try {
+ obj_info = H5.H5Oget_info(oid);
+ assertFalse("testH5Oinc_dec_count: H5Oget_info ",obj_info==null);
+ assertTrue("testH5Oinc_dec_count: H5Oget_info reference count",obj_info.rc==2);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oinc_dec_count: H5.H5Oget_info: " + err);
+ }
+ try {
+ H5.H5Odecr_refcount(oid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oinc_dec_count: H5.H5Odecr_refcount: " + err);
+ }
+ try {
+ obj_info = H5.H5Oget_info(oid);
+ assertFalse("testH5Oinc_dec_count: H5Oget_info ",obj_info==null);
+ assertTrue("testH5Oinc_dec_count: H5Oget_info reference count",obj_info.rc==1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Oinc_dec_count: H5.H5Oget_info: " + err);
+ }
+ }
+ finally {
+ try{H5.H5Oclose(oid);} catch (Exception ex) {}
+ }
+ }
+
+}
diff --git a/java/test/TestH5Oparams.java b/java/test/TestH5Oparams.java
new file mode 100644
index 0000000..ced66f5
--- /dev/null
+++ b/java/test/TestH5Oparams.java
@@ -0,0 +1,154 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertTrue;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Oparams {
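+    // Parameter checks: each test passes an invalid identifier or a null name and expects an exception (except H5Oclose, which should return cleanly).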
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void checkOpenIDs() {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ }
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test//(expected = HDF5LibraryException.class)
+ public void testH5Oclose_invalid() throws Throwable {
+        long ret_val = H5.H5Oclose(-1);
+        assertTrue(ret_val == 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Oopen_null() throws Throwable {
+ H5.H5Oopen(-1, null, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Oopen_invalid() throws Throwable {
+ H5.H5Oopen(-1, "Bogus", 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Ocopy_invalid() throws Throwable {
+ H5.H5Ocopy(-1, "Bogus", -1, "Bogus", -1, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Ocopy_null_current() throws Throwable {
+ H5.H5Ocopy(-1, null, 0, "Bogus", 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Ocopy_null_dest() throws Throwable {
+ H5.H5Ocopy(-1, "Bogus", 0, null, 0, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Oget_info_invalid() throws Throwable {
+ H5.H5Oget_info(-1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Oget_info_by_name_null() throws Throwable {
+ H5.H5Oget_info_by_name(-1, null, HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Oget_info_by_name_invalid() throws Throwable {
+ H5.H5Oget_info_by_name(-1, "/testH5Gcreate", HDF5Constants.H5P_DEFAULT);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Oget_info_by_idx_invalid() throws Throwable {
+ H5.H5Oget_info_by_idx(-1, "Bogus", -1, -1, -1L, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Oget_info_by_idx_null() throws Throwable {
+ H5.H5Oget_info_by_idx(-1, null, 0, 0, 0L, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Olink_invalid() throws Throwable {
+ H5.H5Olink(-1, -1, "Bogus", -1, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Olink_null_dest() throws Throwable {
+ H5.H5Olink(-1, 0, null, 0, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Ovisit_null() throws Throwable {
+ H5.H5Ovisit(-1, -1, -1, null, null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Ovisit_by_name_nullname() throws Throwable {
+ H5.H5Ovisit_by_name(-1, null, -1, -1, null, null, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Ovisit_by_name_null() throws Throwable {
+ H5.H5Ovisit_by_name(-1, "Bogus", -1, -1, null, null, -1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Oset_comment_invalid() throws Throwable {
+ H5.H5Oset_comment(-1, "Bogus");
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Oget_comment_invalid() throws Throwable {
+ H5.H5Oget_comment(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Oset_comment_by_name_invalid() throws Throwable {
+ H5.H5Oset_comment_by_name(-1, "Bogus", null, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Oset_comment_by_name_null() throws Throwable {
+ H5.H5Oset_comment_by_name(-1, null, null, -1);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Oget_comment_by_name_invalid() throws Throwable {
+ H5.H5Oget_comment_by_name(-1, "Bogus", -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Oget_comment_by_name_null() throws Throwable {
+ H5.H5Oget_comment_by_name(-1, null, -1);
+ }
+
+}
diff --git a/java/test/TestH5P.java b/java/test/TestH5P.java
new file mode 100644
index 0000000..b8a4376
--- /dev/null
+++ b/java/test/TestH5P.java
@@ -0,0 +1,1222 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5FunctionArgumentException;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5P {
+ @Rule public TestName testname = new TestName();
+
+ private static final String H5_FILE = "test.h5";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
+ long[] H5dims = { DIM_X, DIM_Y };
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5did = -1;
+ long lapl_id = -1;
+ long fapl_id = -1;
+ long fcpl_id = -1;
+ long ocpl_id = -1;
+ long ocp_plist_id = -1;
+ long lcpl_id = -1;
+ long plapl_id = -1;
+ long plist_id = -1;
+ long gapl_id = -1;
+ long gcpl_id = -1;
+ long acpl_id = -1;
+
+ private final void _deleteFile(String filename) {
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ private final long _createDataset(long fid, long dsid, String name, long dapl) {
+ long did = -1;
+ try {
+ did = H5.H5Dcreate(fid, name, HDF5Constants.H5T_STD_I32BE, dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5P._createDataset: ", did > 0);
+
+ return did;
+ }
+
+ private final void _createH5File(long fcpl, long fapl) {
+ try {
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ fcpl, fapl);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5P.createH5file: " + err);
+ }
+ assertTrue("TestH5P.createH5file: H5.H5Fcreate: ", H5fid > 0);
+ assertTrue("TestH5P.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+ assertTrue("TestH5P.createH5file: _createDataset: ", H5did > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ public void deleteH5file() throws HDF5LibraryException {
+ _deleteFile(H5_FILE);
+ }
+
+ @Before
+ public void createH5fileProperties()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
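+        // Create one property list of each class used by these tests; deleteH5fileProperties closes them all afterwards.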
+ try {
+ lapl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
+ fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ fcpl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_CREATE);
+ ocpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ ocp_plist_id = H5.H5Pcreate(HDF5Constants.H5P_OBJECT_COPY);
+ lcpl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
+ plapl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_ACCESS);
+ plist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ gapl_id = H5.H5Pcreate(HDF5Constants.H5P_GROUP_ACCESS);
+ gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
+ acpl_id = H5.H5Pcreate(HDF5Constants.H5P_ATTRIBUTE_CREATE);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5P.createH5file: " + err);
+ }
+ assertTrue(lapl_id > 0);
+ assertTrue(fapl_id > 0);
+ assertTrue(fcpl_id > 0);
+ assertTrue(ocpl_id > 0);
+ assertTrue(ocp_plist_id > 0);
+ assertTrue(lcpl_id > 0);
+ assertTrue(plapl_id>0);
+ assertTrue(plist_id > 0);
+ assertTrue(gapl_id > 0);
+ assertTrue(gcpl_id >0);
+ assertTrue(acpl_id >0);
+ }
+
+ @After
+ public void deleteH5fileProperties() throws HDF5LibraryException {
+ if (lapl_id >0)
+ try {H5.H5Pclose(lapl_id);} catch (Exception ex) {}
+ if (fapl_id >0)
+ try {H5.H5Pclose(fapl_id);} catch (Exception ex) {}
+ if (fcpl_id >0)
+ try {H5.H5Pclose(fcpl_id);} catch (Exception ex) {}
+ if (ocpl_id >0)
+ try {H5.H5Pclose(ocpl_id);} catch (Exception ex) {}
+ if (ocp_plist_id >0)
+ try {H5.H5Pclose(ocp_plist_id);} catch (Exception ex) {}
+ if (lcpl_id >0)
+ try {H5.H5Pclose(lcpl_id);} catch (Exception ex) {}
+ if (plapl_id >0)
+ try {H5.H5Pclose(plapl_id);} catch (Exception ex) {}
+ if (plist_id >0)
+ try {H5.H5Pclose(plist_id);} catch (Exception ex) {}
+ if (gapl_id >0)
+ try {H5.H5Pclose(gapl_id);} catch (Exception ex) {}
+ if (gcpl_id >0)
+ try {H5.H5Pclose(gcpl_id);} catch (Exception ex) {}
+ if (acpl_id >0)
+ try {H5.H5Pclose(acpl_id);} catch (Exception ex) {}
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Pget_nlinks() {
+ long nlinks = -1;
+ try {
+ nlinks = (long) H5.H5Pget_nlinks(lapl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Pget_nlinks: " + err);
+ }
+ assertTrue("testH5Pget_nlinks", nlinks > 0);
+ // Check the default value of nlinks.
+ assertEquals(nlinks, 16L);
+ }
+
+ @Test
+ public void testH5Pset_nlinks() {
+ long nlinks = 20;
+ int ret_val = -1;
+ try {
+ ret_val = H5.H5Pset_nlinks(lapl_id, nlinks);
+ nlinks = (long) H5.H5Pget_nlinks(lapl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Pset_nlinks: " + err);
+ }
+ assertTrue("testH5Pset_nlinks", ret_val >= 0);
+ // Check the value of nlinks retrieved from H5Pget_nlinks function.
+ assertEquals(nlinks, 20L);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Pset_libver_bounds_invalidlow() throws Throwable {
+ H5.H5Pset_libver_bounds(fapl_id, 5, HDF5Constants.H5F_LIBVER_LATEST);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Pset_libver_bounds_invalidhigh() throws Throwable {
+ H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST, 5);
+ }
+
+ @Test
+ public void testH5Pget_link_creation_order() {
+ int crt_order_flags = 0;
+ try {
+ crt_order_flags = H5.H5Pget_link_creation_order(fcpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_link_creation_order: " + err);
+ }
+ assertTrue("testH5Pget_link_creation_order", crt_order_flags >= 0);
+ }
+
+ @Test
+ public void testH5Pset_link_creation_order_trackedPLUSindexed() {
+ int ret_val = -1;
+ int crt_order_flags = HDF5Constants.H5P_CRT_ORDER_TRACKED + HDF5Constants.H5P_CRT_ORDER_INDEXED;
+ int crtorderflags = 0;
+
+ try {
+ ret_val = H5.H5Pset_link_creation_order(fcpl_id, crt_order_flags);
+ crtorderflags = H5.H5Pget_link_creation_order(fcpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_link_creation_order: " + err);
+ }
+ assertTrue("testH5Pset_link_creation_order_trackedPLUSindexed",ret_val >= 0);
+ assertEquals(crt_order_flags, crtorderflags);
+ }
+
+ @Test
+ public void testH5Pset_link_creation_order_tracked() {
+ int ret_val = -1;
+ int crtorderflags = 0;
+
+ try {
+ ret_val = H5.H5Pset_link_creation_order(fcpl_id, HDF5Constants.H5P_CRT_ORDER_TRACKED);
+ crtorderflags = H5.H5Pget_link_creation_order(fcpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_link_creation_order: " + err);
+ }
+ assertTrue("testH5Pset_link_creation_order_tracked",ret_val >= 0);
+ assertEquals(HDF5Constants.H5P_CRT_ORDER_TRACKED, crtorderflags);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pset_link_creation_order_invalidvalue() throws Throwable {
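+        // H5P_CRT_ORDER_INDEXED is only valid in combination with H5P_CRT_ORDER_TRACKED, so this call should fail.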
+ H5.H5Pset_link_creation_order(fcpl_id, HDF5Constants.H5P_CRT_ORDER_INDEXED);
+ }
+
+ @Test
+ public void testH5Pget_attr_creation_order() {
+ int crt_order_flags = 0;
+
+ try {
+ crt_order_flags = H5.H5Pget_attr_creation_order(ocpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_attr_creation_order: " + err);
+ }
+ assertTrue("testH5Pget_attr_creation_order", crt_order_flags >= 0);
+ }
+
+ @Test
+ public void testH5Pset_attr_creation_order_trackedPLUSindexed() {
+ int ret_val = -1;
+ int crt_order_flags = HDF5Constants.H5P_CRT_ORDER_TRACKED + HDF5Constants.H5P_CRT_ORDER_INDEXED;
+ int crtorderflags = 0;
+
+ try {
+ ret_val = H5.H5Pset_attr_creation_order(ocpl_id, crt_order_flags);
+ crtorderflags = H5.H5Pget_attr_creation_order(ocpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_attr_creation_order: " + err);
+ }
+ assertTrue("testH5Pset_attr_creation_order_trackedPLUSindexed", ret_val >= 0);
+ assertEquals(crt_order_flags, crtorderflags);
+ }
+
+ @Test
+ public void testH5Pset_attr_creation_order_tracked() {
+ int ret_val = -1;
+ int crtorderflags = 0;
+
+ try {
+ ret_val = H5.H5Pset_attr_creation_order(ocpl_id, HDF5Constants.H5P_CRT_ORDER_TRACKED);
+ crtorderflags = H5.H5Pget_attr_creation_order(ocpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_attr_creation_order: " + err);
+ }
+ assertTrue("testH5Pset_attr_creation_order_tracked", ret_val >= 0);
+ assertEquals(HDF5Constants.H5P_CRT_ORDER_TRACKED, crtorderflags);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pset_attr_creation_order_invalidvalue() throws Throwable {
+ H5.H5Pset_attr_creation_order(ocpl_id, HDF5Constants.H5P_CRT_ORDER_INDEXED);
+ }
+
+ @Test
+ public void testH5Pset_copy_object() {
+
+ int cpy_option = -1;
+
+ try {
+ H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_SHALLOW_HIERARCHY_FLAG);
+ cpy_option = H5.H5Pget_copy_object(ocp_plist_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_copy_object: " + err);
+ }
+ assertEquals(HDF5Constants.H5O_COPY_SHALLOW_HIERARCHY_FLAG, cpy_option);
+
+ try {
+ H5.H5Pset_copy_object(ocp_plist_id, HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG);
+ cpy_option = H5.H5Pget_copy_object(ocp_plist_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_copy_object: " + err);
+ }
+ assertEquals(HDF5Constants.H5O_COPY_EXPAND_REFERENCE_FLAG, cpy_option);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pset_copy_object_invalidobject() throws Throwable {
+ H5.H5Pset_copy_object(HDF5Constants.H5P_DEFAULT, HDF5Constants.H5O_COPY_SHALLOW_HIERARCHY_FLAG);
+ }
+
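+ // Creating intermediate groups is a link creation property; setting it on an
+ // object-copy property list is rejected, as the invalidobject test below shows.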
+ @Test
+ public void testH5Pset_create_intermediate_group() {
+
+ int ret_val = -1;
+ try {
+ ret_val = H5.H5Pset_create_intermediate_group(lcpl_id, true);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_create_intermediate_group: " + err);
+ }
+ assertTrue(ret_val>=0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pset_create_intermediate_group_invalidobject() throws Throwable {
+ H5.H5Pset_create_intermediate_group(ocp_plist_id, true);
+ }
+
+ @Test
+ public void testH5Pget_create_intermediate_group() {
+ boolean flag = false;
+ try {
+ H5.H5Pset_create_intermediate_group(lcpl_id, true);
+ flag = H5.H5Pget_create_intermediate_group(lcpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_create_intermediate_group: " + err);
+ }
+ assertEquals(true, flag);
+ }
+
+ @Test
+ public void testH5Pget_create_intermediate_group_notcreated() {
+ boolean flag = true;
+ try {
+ flag = H5.H5Pget_create_intermediate_group(lcpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_create_intermediate_group_notcreated: " + err);
+ }
+ assertEquals(false, flag);
+ }
+
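+ // The data transform expression below converts Fahrenheit to Celsius and is
+ // applied element-wise during I/O done with this transfer property list.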
+ @Test
+ public void testH5Pset_data_transform() {
+
+ String expression = "(5/9.0)*(x-32)";
+ int ret_val = -1;
+
+ try {
+ ret_val= H5.H5Pset_data_transform(plist_id, expression);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_data_transform: " + err);
+ }
+ assertTrue(ret_val>=0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Pset_data_transform_NullExpression() throws Throwable {
+ H5.H5Pset_data_transform(plist_id, null);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pset_data_transform_InvalidExpression1() throws Throwable {
+ H5.H5Pset_data_transform(plist_id, "");
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pset_data_transform_InvalidExpression2() throws Throwable {
+ H5.H5Pset_data_transform(plist_id, "hello");
+ }
+
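+ // The transform expression is returned through a String[] out-parameter; the
+ // size argument must be greater than zero (see the IllegalSize test below).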
+ @Test
+ public void testH5Pget_data_transform() {
+
+ String expression = "(5/9.0)*(x-32)";
+ String [] express = {""};
+ long express_size = 0;
+ long size = 20;
+
+ try {
+ H5.H5Pset_data_transform(plist_id, expression);
+ express_size = H5.H5Pget_data_transform(plist_id, express, size);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_data_transform: " + err);
+ }
+ assertTrue(express_size>=0);
+ assertTrue("The data transform expression: ", expression.equals(express[0]));
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pget_data_transform_ExpressionNotSet() throws Throwable {
+ String [] express = {""};
+ H5.H5Pget_data_transform(plist_id, express, 20);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Pget_data_transform_IllegalSize() throws Throwable {
+ String [] express = {""};
+ H5.H5Pset_data_transform(plist_id, "(5/9.0)*(x-32)");
+ H5.H5Pget_data_transform(plist_id, express, 0);
+ }
+
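+ // External link file access flags default to H5F_ACC_DEFAULT; only read/write
+ // access flags are accepted, so H5F_ACC_TRUNC and negative values are rejected
+ // in the tests that follow.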
+ @Test
+ public void testH5Pget_elink_acc_flags() {
+
+ int get_flags = -1;
+ try {
+ get_flags = H5.H5Pget_elink_acc_flags(gapl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_elink_acc_flags: " + err);
+ }
+ assertTrue("H5Pget_elink_acc_flags", get_flags >= 0);
+ assertEquals(HDF5Constants.H5F_ACC_DEFAULT, get_flags);
+ }
+
+ @Test
+ public void testH5Pset_elink_acc_flags() {
+
+ int get_flags = -1;
+ int ret_val = -1;
+ try {
+ ret_val = H5.H5Pset_elink_acc_flags(lapl_id, HDF5Constants.H5F_ACC_RDWR);
+ get_flags = H5.H5Pget_elink_acc_flags(lapl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_elink_acc_flags: " + err);
+ }
+ assertTrue("H5Pset_elink_acc_flags", ret_val >= 0);
+ assertEquals(HDF5Constants.H5F_ACC_RDWR, get_flags);
+ }
+
+ @Test(expected = HDF5FunctionArgumentException.class)
+ public void testH5Pset_elink_acc_flags_InvalidFlag1() throws Throwable {
+ H5.H5Pset_elink_acc_flags(lapl_id, HDF5Constants.H5F_ACC_TRUNC);
+ }
+
+ @Test(expected = HDF5FunctionArgumentException.class)
+ public void testH5Pset_elink_acc_flags_InvalidFlag2() throws Throwable {
+ H5.H5Pset_elink_acc_flags(lapl_id, -1);
+ }
+
+ @Test
+ public void testH5Pset_link_phase_change() {
+
+ int ret_val = -1;
+ try {
+ ret_val = H5.H5Pset_link_phase_change(fcpl_id , 2, 2);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_link_phase_change: " + err);
+ }
+ assertTrue("H5Pset_link_phase_change", ret_val >= 0);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Pset_link_phase_change_Highmax_Compact() throws Throwable {
+ H5.H5Pset_link_phase_change(fcpl_id , 70000000, 3);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Pset_link_phase_change_max_compactLESSTHANmin_dense() throws Throwable {
+ H5.H5Pset_link_phase_change(fcpl_id , 5, 6);
+ }
+
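+ // Compact-to-dense link storage transition: the defaults are a maximum of 8
+ // links in compact storage and a minimum of 6 links in dense storage, and
+ // max_compact may not be smaller than min_dense (see the tests above).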
+ @Test
+ public void testH5Pget_link_phase_change() {
+ int ret_val = -1;
+ int[] links = new int[2];
+
+ try {
+ ret_val = H5.H5Pget_link_phase_change(fcpl_id, links);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_link_phase_change: " + err);
+ }
+ assertTrue("testH5Pget_link_phase_change", ret_val >= 0);
+ assertEquals("Default value of maximum compact storage", 8, links[0]);
+ assertEquals("Default value of minimum dense storage", 6, links[1]);
+ }
+
+ @Test
+ public void testH5Pget_link_phase_change_EqualsSet() {
+ int[] links = new int[2];
+ try {
+ H5.H5Pset_link_phase_change(fcpl_id , 10, 7);
+ H5.H5Pget_link_phase_change(fcpl_id, links);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_link_phase_change_EqualsSet: " + err);
+ }
+ assertEquals("Value of maximum compact storage set", 10, links[0]);
+ assertEquals("Value of minimum dense storage set", 7, links[1]);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Pget_link_phase_change_Null() throws Throwable {
+ H5.H5Pget_link_phase_change(fcpl_id, null);
+ }
+
+ @Test
+ public void testH5Pget_attr_phase_change() {
+ int ret_val = -1;
+ int[] attributes = new int[2];
+
+ try {
+ ret_val = H5.H5Pget_attr_phase_change(ocpl_id, attributes);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_attr_phase_change: " + err);
+ }
+ assertTrue("testH5Pget_attr_phase_change", ret_val >= 0);
+ assertEquals("Default value of the max. no. of attributes stored in compact storage", 8, attributes[0]);
+ assertEquals("Default value of the min. no. of attributes stored in dense storage", 6, attributes[1]);
+ try {
+ H5.H5Pset_attr_phase_change(ocpl_id, 9, 5);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_attr_phase_change: " + err);
+ }
+ try {
+ ret_val = H5.H5Pget_attr_phase_change(ocpl_id, attributes);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_attr_phase_change: " + err);
+ }
+ assertTrue("testH5Pget_attr_phase_change", ret_val >= 0);
+ assertEquals("Default value of the max. no. of attributes stored in compact storage", 9, attributes[0]);
+ assertEquals("Default value of the min. no. of attributes stored in dense storage", 5, attributes[1]);
+ }
+
+ @Test
+ public void testH5Pget_shared_mesg_phase_change() {
+ int ret_val = -1;
+ int[] size = new int[2];
+
+ try {
+ ret_val = H5.H5Pget_shared_mesg_phase_change(fcpl_id, size);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_shared_mesg_phase_change: " + err);
+ }
+ assertTrue("testH5Pget_shared_mesg_phase_change", ret_val >= 0);
+ }
+
+ @Test
+ public void testH5Pget_shared_mesg_phase_change_EqualsSET() {
+ int[] size = new int[2];
+
+ try {
+ H5.H5Pset_shared_mesg_phase_change(fcpl_id,50, 40);
+ H5.H5Pget_shared_mesg_phase_change(fcpl_id, size);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_shared_mesg_phase_change_EqualsSET: " + err);
+ }
+ assertEquals("Value of maximum list set", 50, size[0]);
+ assertEquals("Value of minimum btree set", 40, size[1]);
+ }
+
+ @Test
+ public void testH5Pset_shared_mesg_phase_change() {
+
+ int ret_val = -1;
+ try {
+ ret_val = H5.H5Pset_shared_mesg_phase_change(fcpl_id,2, 1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_shared_mesg_phase_change: " + err);
+ }
+ assertTrue("H5Pset_shared_mesg_phase_change", ret_val >= 0);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5PH5Pset_shared_mesg_phase_change_HighMaxlistValue() throws Throwable {
+ H5.H5Pset_shared_mesg_phase_change(fcpl_id, 5001, 4000);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5PH5Pset_shared_mesg_phase_change_HighMinbtreeValue() throws Throwable {
+ H5.H5Pset_shared_mesg_phase_change(fcpl_id, 5000, 5001);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5PH5Pset_shared_mesg_phase_change_MinbtreeGreaterThanMaxlist() throws Throwable {
+ H5.H5Pset_shared_mesg_phase_change(fcpl_id, 3, 7);
+ }
+
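+ // Shared object header message indexes: nindexes may not exceed 8, as the
+ // InvalidHIGHnindexes test below demonstrates.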
+ @Test
+ public void testH5Pget_shared_mesg_nindexes() {
+
+ int nindexes = -1;
+ try {
+ nindexes = H5.H5Pget_shared_mesg_nindexes(fcpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_shared_mesg_nindexes: " + err);
+ }
+ assertTrue("H5Pget_shared_mesg_nindexes", nindexes >= 0);
+ }
+
+ @Test
+ public void testH5Pset_shared_mesg_nindexes() {
+
+ int nindexes = -1;
+ int ret_val = -1;
+ try {
+ ret_val = H5.H5Pset_shared_mesg_nindexes(fcpl_id, 7);
+ nindexes = H5.H5Pget_shared_mesg_nindexes(fcpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_shared_mesg_nindexes: " + err);
+ }
+ assertTrue("H5Pset_shared_mesg_nindexes", ret_val >= 0);
+ assertEquals("Value of nindexes is equal to value set",7 ,nindexes);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Pset_shared_mesg_nindexes_InvalidHIGHnindexes()throws Throwable {
+ H5.H5Pset_shared_mesg_nindexes(fcpl_id, 9);
+ }
+
+ @Test
+ public void testH5Pset_shared_mesg_index() {
+
+ int ret_val = -1;
+ try {
+ H5.H5Pset_shared_mesg_nindexes(fcpl_id, 2);
+ ret_val = H5.H5Pset_shared_mesg_index(fcpl_id, 0,HDF5Constants.H5O_SHMESG_ATTR_FLAG, 10);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_shared_mesg_index: " + err);
+ }
+ assertTrue("H5Pset_shared_mesg_index", ret_val >= 0);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Pset_shared_mesg_index_Invalid_indexnum() throws Throwable {
+ H5.H5Pset_shared_mesg_index(fcpl_id, 2,HDF5Constants.H5O_SHMESG_ATTR_FLAG, 10);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Pset_shared_mesg_index_InvalidFlag() throws Throwable {
+ H5.H5Pset_shared_mesg_nindexes(fcpl_id, 7);
+ H5.H5Pset_shared_mesg_index(fcpl_id, 2,HDF5Constants.H5O_SHMESG_ALL_FLAG + 1, 10);
+ }
+
+ @Test
+ public void testH5Pget_shared_mesg_index() {
+
+ int ret_val = -1;
+ int[] mesg_info = new int[2];
+ try {
+ H5.H5Pset_shared_mesg_nindexes(fcpl_id, 2);
+ H5.H5Pset_shared_mesg_index(fcpl_id, 0,HDF5Constants.H5O_SHMESG_ATTR_FLAG, 10);
+ ret_val = H5.H5Pget_shared_mesg_index(fcpl_id, 0, mesg_info);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_shared_mesg_index: " + err);
+ }
+ assertTrue("H5Pget_shared_mesg_index", ret_val >= 0);
+ assertEquals("Type of message", HDF5Constants.H5O_SHMESG_ATTR_FLAG, mesg_info[0]);
+ assertEquals("minimum message size", 10, mesg_info[1]);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Pget_shared_mesg_index_Invalid_indexnum() throws Throwable {
+ int[] mesg_info = new int[2];
+ H5.H5Pget_shared_mesg_index(fcpl_id, 0, mesg_info);
+ }
+
+ @Test
+ public void testH5Pset_local_heap_size_hint() {
+ int ret_val = -1;
+ try {
+ ret_val = H5.H5Pset_local_heap_size_hint(gcpl_id, 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_local_heap_size_hint: " + err);
+ }
+ assertTrue("H5Pset_local_heap_size_hint", ret_val >= 0);
+ }
+
+ @Test
+ public void testH5Pget_local_heap_size_hint() {
+ long size_hint = -1;
+ try {
+ size_hint = H5.H5Pget_local_heap_size_hint(gcpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_local_heap_size_hint: " + err);
+ }
+ assertTrue("H5Pget_local_heap_size_hint", size_hint >= 0);
+ }
+
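+ // The N-bit and scale-offset filters are enabled on the object (dataset)
+ // creation property list; scale-offset additionally validates its scale type
+ // and scale factor arguments.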
+ @Test
+ public void testH5Pset_nbit() {
+ int ret_val = -1;
+ try {
+ ret_val = H5.H5Pset_nbit(ocpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_nbit: " + err);
+ }
+ assertTrue("H5Pset_nbit", ret_val >= 0);
+ }
+
+ @Test
+ public void testH5Pset_scaleoffset() {
+ int ret_val = -1;
+ int scale_type = HDF5Constants.H5Z_SO_FLOAT_DSCALE;
+ int scale_factor = HDF5Constants.H5Z_SO_INT_MINBITS_DEFAULT;
+ try {
+ ret_val = H5.H5Pset_scaleoffset(ocpl_id, scale_type, scale_factor);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_scaleoffset: " + err);
+ }
+ assertTrue("H5Pset_scaleoffset", ret_val >= 0);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Pset_scaleoffset_Invalidscale_type() throws Throwable {
+ H5.H5Pset_scaleoffset(ocpl_id, 3, 1);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Pset_scaleoffset_Invalidscale_factor() throws Throwable {
+ H5.H5Pset_scaleoffset(ocpl_id, HDF5Constants.H5Z_SO_INT, -1);
+ }
+
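+ // Estimated link information for a group creation property list; estimates
+ // outside the supported range are rejected (see the InvalidValues test below).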
+ @Test
+ public void testH5Pset_est_link_info() {
+ int ret_val = -1;
+ try {
+ ret_val = H5.H5Pset_est_link_info(gcpl_id, 0,10);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_est_link_info: " + err);
+ }
+ assertTrue("H5Pset_est_link_info", ret_val >= 0);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Pset_est_link_info_InvalidValues() throws Throwable {
+ H5.H5Pset_est_link_info(gcpl_id, 100000,10);
+ }
+
+ @Test
+ public void testH5Pget_est_link_info() {
+ int ret_val = -1;
+ int[] link_info = new int[2];
+ try {
+ ret_val = H5.H5Pget_est_link_info(gcpl_id, link_info);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_est_link_info: " + err);
+ }
+ assertTrue("H5Pget_est_link_info", ret_val >= 0);
+ }
+
+ @Test
+ public void testH5Pset_elink_prefix() {
+ int ret_val = -1;
+ String prefix = "tmp";
+ try {
+ ret_val = H5.H5Pset_elink_prefix(plapl_id, prefix);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_est_link_info: " + err);
+ }
+ assertTrue("H5Pset_elink_prefix", ret_val >= 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Pset_elink_prefix_null() throws Throwable{
+ H5.H5Pset_elink_prefix(plapl_id, null);
+ }
+
+ @Test
+ public void testH5Pget_elink_prefix() {
+ String prefix = "tmp";
+ String[] pre = {""};
+ long prefix_size = 0;
+
+ try {
+ H5.H5Pset_elink_prefix(plapl_id, prefix);
+ prefix_size = H5.H5Pget_elink_prefix(plapl_id, pre);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_elink_prefix: " + err);
+ }
+ assertTrue(prefix_size>=0);
+ assertTrue("The prefix: ", prefix.equals(pre[0]));
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Pget_elink_prefix_null() throws Throwable {
+ H5.H5Pget_elink_prefix(plapl_id, null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Pget_version_null() throws Throwable {
+ H5.H5Pget_version(fcpl_id, null);
+ }
+
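+ // H5Pget_version fills the array with the super block, global freelist,
+ // symbol table and shared object header format versions, in that order.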
+ @Test
+ public void testH5Pget_version() {
+ int[] version_info = {255,255,255,255};
+
+ try {
+ _createH5File(fcpl_id, fapl_id);
+ H5.H5Pget_version(fcpl_id, version_info);
+ deleteH5file();
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_version: " + err);
+ }
+ assertTrue("super block version: "+version_info[0], version_info[0] == 0);
+ assertTrue("global freelist version: "+version_info[1], version_info[1] == 0);
+ assertTrue("symbol table version: "+version_info[2], version_info[2] == 0);
+ assertTrue("shared object header version: "+version_info[3], version_info[3] == 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Pget_userblock_null() throws Throwable {
+ H5.H5Pget_userblock(fcpl_id, null);
+ }
+
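+ // Set a 1024-byte user block before creating the file, then reopen the file
+ // creation property list from the created file to verify the value persisted.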
+ @Test
+ public void testH5P_userblock() {
+ int[] version_info = {255,255,255,255};
+ long[] size = {0};
+
+ try {
+ H5.H5Pset_userblock(fcpl_id, 1024);
+ _createH5File(fcpl_id, fapl_id);
+
+ /* Close FCPL */
+ H5.H5Pclose(fcpl_id);
+
+ /* Get the file's creation property list */
+ fcpl_id = H5.H5Fget_create_plist(H5fid);
+
+ /* Get the file's version information */
+ H5.H5Pget_version(fcpl_id, version_info);
+ H5.H5Pget_userblock(fcpl_id, size);
+ deleteH5file();
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_userblock: " + err);
+ }
+ assertTrue("super block version: "+version_info[0], version_info[0] == 0);
+ assertTrue("global freelist version: "+version_info[1], version_info[1] == 0);
+ assertTrue("symbol table version: "+version_info[2], version_info[2] == 0);
+ assertTrue("shared object header version: "+version_info[3], version_info[3] == 0);
+ assertTrue("user block size: "+size[0], size[0] == 1024);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Pget_sizes_null() throws Throwable {
+ H5.H5Pget_sizes(fcpl_id, null);
+ }
+
+ @Test
+ public void testH5P_sizes() {
+ int[] version_info = {255,255,255,255};
+ long[] size = {0,0};
+
+ try {
+ H5.H5Pset_sizes(fcpl_id, 4, 8);
+ _createH5File(fcpl_id, fapl_id);
+
+ /* Close FCPL */
+ H5.H5Pclose(fcpl_id);
+
+ /* Get the file's creation property list */
+ fcpl_id = H5.H5Fget_create_plist(H5fid);
+
+ /* Get the file's version information */
+ H5.H5Pget_version(fcpl_id, version_info);
+ H5.H5Pget_sizes(fcpl_id, size);
+ deleteH5file();
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_sizes: " + err);
+ }
+ assertTrue("super block version: "+version_info[0], version_info[0] == 0);
+ assertTrue("global freelist version: "+version_info[1], version_info[1] == 0);
+ assertTrue("symbol table version: "+version_info[2], version_info[2] == 0);
+ assertTrue("shared object header version: "+version_info[3], version_info[3] == 0);
+ assertTrue("sizeof_addr size: "+size[0], size[0] == 4);
+ assertTrue("sizeof_size size: "+size[1], size[1] == 8);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Pget_sym_k_null() throws Throwable {
+ H5.H5Pget_sym_k(fcpl_id, null);
+ }
+
+ @Test
+ public void testH5P_sym_k() {
+ int[] version_info = {255,255,255,255};
+ int[] size = {0,0};
+
+ try {
+ H5.H5Pset_sym_k(fcpl_id, 32, 8);
+ _createH5File(fcpl_id, fapl_id);
+
+ /* Close FCPL */
+ H5.H5Pclose(fcpl_id);
+
+ /* Get the file's creation property list */
+ fcpl_id = H5.H5Fget_create_plist(H5fid);
+
+ /* Get the file's version information */
+ H5.H5Pget_version(fcpl_id, version_info);
+ H5.H5Pget_sym_k(fcpl_id, size);
+ deleteH5file();
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_sym_k: " + err);
+ }
+ assertTrue("super block version: "+version_info[0], version_info[0] == 0);
+ assertTrue("global freelist version: "+version_info[1], version_info[1] == 0);
+ assertTrue("symbol table version: "+version_info[2], version_info[2] == 0);
+ assertTrue("shared object header version: "+version_info[3], version_info[3] == 0);
+ assertTrue("symbol table tree rank: "+size[0], size[0] == 32);
+ assertTrue("symbol table node size: "+size[1], size[1] == 8);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Pget_istore_k_null() throws Throwable {
+ H5.H5Pget_istore_k(fcpl_id, null);
+ }
+
+ @Test
+ public void testH5P_istore_k() {
+ int[] version_info = {255,255,255,255};
+ int[] size = {0};
+
+ try {
+ H5.H5Pset_istore_k(fcpl_id, 64);
+ _createH5File(fcpl_id, fapl_id);
+
+ /* Close FCPL */
+ H5.H5Pclose(fcpl_id);
+
+ /* Get the file's creation property list */
+ fcpl_id = H5.H5Fget_create_plist(H5fid);
+
+ /* Get the file's version information */
+ H5.H5Pget_version(fcpl_id, version_info);
+ H5.H5Pget_istore_k(fcpl_id, size);
+ deleteH5file();
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_sym_k: " + err);
+ }
+ assertTrue("super block version: "+version_info[0], version_info[0] == 1);
+ assertTrue("global freelist version: "+version_info[1], version_info[1] == 0);
+ assertTrue("symbol table version: "+version_info[2], version_info[2] == 0);
+ assertTrue("shared object header version: "+version_info[3], version_info[3] == 0);
+ assertTrue("chunked storage b-tree 1/2-rank: "+size[0], size[0] == 64);
+ }
+
+ @Test
+ public void testH5P_obj_track_times() {
+ boolean default_ret_val = false;
+ boolean ret_val = true;
+ try {
+ default_ret_val = H5.H5Pget_obj_track_times(ocpl_id);
+ H5.H5Pset_obj_track_times(ocpl_id, false);
+ ret_val = H5.H5Pget_obj_track_times(ocpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_obj_track_times: " + err);
+ }
+ assertTrue("H5Pget_obj_track_times default", default_ret_val);
+ assertFalse("H5Pget_obj_track_times", ret_val);
+ }
+
+ @Test
+ public void testH5Pget_char_encoding() {
+ int char_encoding = 0;
+
+ try {
+ char_encoding = H5.H5Pget_char_encoding(acpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_char_encoding: " + err);
+ }
+ assertTrue("testH5Pget_char_encoding", char_encoding == HDF5Constants.H5T_CSET_ASCII);
+ try {
+ H5.H5Pset_char_encoding(acpl_id, HDF5Constants.H5T_CSET_UTF8);
+ char_encoding = H5.H5Pget_char_encoding(acpl_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_char_encoding: " + err);
+ }
+ assertTrue("testH5Pget_char_encoding", char_encoding == HDF5Constants.H5T_CSET_UTF8);
+ }
+
+ @Test
+ public void testH5P_fill_time() {
+ int[] fill_time = {0};
+
+ try {
+ H5.H5Pget_fill_time(ocpl_id, fill_time);
+ assertTrue("fill_time: "+fill_time[0], fill_time[0] == HDF5Constants.H5D_FILL_TIME_IFSET);
+ H5.H5Pset_fill_time(ocpl_id, HDF5Constants.H5D_FILL_TIME_ALLOC);
+ H5.H5Pget_fill_time(ocpl_id, fill_time);
+ assertTrue("fill_time: "+fill_time[0], fill_time[0] == HDF5Constants.H5D_FILL_TIME_ALLOC);
+ H5.H5Pset_fill_time(ocpl_id, HDF5Constants.H5D_FILL_TIME_NEVER);
+ H5.H5Pget_fill_time(ocpl_id, fill_time);
+ assertTrue("fill_time: "+fill_time[0], fill_time[0] == HDF5Constants.H5D_FILL_TIME_NEVER);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_fill_time: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_alloc_time() {
+ int[] alloc_time = {0};
+
+ try {
+ H5.H5Pget_alloc_time(ocpl_id, alloc_time);
+ assertTrue("alloc_time: "+alloc_time[0], alloc_time[0] == HDF5Constants.H5D_ALLOC_TIME_LATE);
+ H5.H5Pset_alloc_time(ocpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
+ H5.H5Pget_alloc_time(ocpl_id, alloc_time);
+ assertTrue("alloc_time: "+alloc_time[0], alloc_time[0] == HDF5Constants.H5D_ALLOC_TIME_EARLY);
+ H5.H5Pset_alloc_time(ocpl_id, HDF5Constants.H5D_ALLOC_TIME_INCR);
+ H5.H5Pget_alloc_time(ocpl_id, alloc_time);
+ assertTrue("alloc_time: "+alloc_time[0], alloc_time[0] == HDF5Constants.H5D_ALLOC_TIME_INCR);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_alloc_time: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_fill_value() {
+ int[] fill_value = {-1};
+ int[] fill_value_status = {-1};
+
+ try {
+ H5.H5Pfill_value_defined(ocpl_id, fill_value_status);
+ assertTrue("fill_value_status: "+fill_value_status[0], fill_value_status[0] == HDF5Constants.H5D_FILL_VALUE_DEFAULT);
+ H5.H5Pget_fill_value(ocpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
+ assertTrue("fill_value: "+fill_value[0], fill_value[0] == 0);
+ fill_value[0] = 255;
+ H5.H5Pset_fill_value(ocpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
+ H5.H5Pget_fill_value(ocpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
+ assertTrue("fill_value: "+fill_value[0], fill_value[0] == 255);
+ H5.H5Pfill_value_defined(ocpl_id, fill_value_status);
+ assertTrue("fill_value_status: "+fill_value_status[0], fill_value_status[0] == HDF5Constants.H5D_FILL_VALUE_USER_DEFINED);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_fill_value: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_layout() {
+ int layout_type = -1;
+
+ try {
+ layout_type = H5.H5Pget_layout(ocpl_id);
+ assertTrue("layout: "+layout_type, layout_type == HDF5Constants.H5D_CONTIGUOUS);
+ H5.H5Pset_layout(ocpl_id, HDF5Constants.H5D_COMPACT);
+ layout_type = H5.H5Pget_layout(ocpl_id);
+ assertTrue("layout: "+layout_type, layout_type == HDF5Constants.H5D_COMPACT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_layout: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_chunk() {
+ long[] chunk_size = {0,0};
+ long[] chunk_new_size = {2,3};
+ int layout_type = -1;
+
+ try {
+ H5.H5Pset_chunk(ocpl_id, 2, chunk_new_size);
+ H5.H5Pget_chunk(ocpl_id, 2, chunk_size);
+ assertTrue("chunk: "+chunk_size[0], chunk_size[0] == chunk_new_size[0]);
+ assertTrue("chunk: "+chunk_size[1], chunk_size[1] == chunk_new_size[1]);
+ layout_type = H5.H5Pget_layout(ocpl_id);
+ assertTrue("layout: "+layout_type, layout_type == HDF5Constants.H5D_CHUNKED);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_chunk: " + err);
+ }
+ }
+
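+ // File space strategy/threshold round-trip; a threshold argument of 0 leaves
+ // the previously set threshold in place, which is why the final assertions
+ // still expect 10.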
+ @Test
+ public void testH5P_file_space() {
+ long[] threshold = {0};
+ int[] strategy = {0};
+ try {
+ H5.H5Pget_file_space(fcpl_id, strategy, threshold);
+ assertTrue("strategy: "+strategy[0], strategy[0] == HDF5Constants.H5F_FILE_SPACE_ALL);
+ assertTrue("theshold: "+threshold[0], threshold[0] == 1);
+ H5.H5Pset_file_space(fcpl_id, HDF5Constants.H5F_FILE_SPACE_ALL_PERSIST, 10);
+ H5.H5Pget_file_space(fcpl_id, strategy, threshold);
+ assertTrue("strategy: "+strategy[0], strategy[0] == HDF5Constants.H5F_FILE_SPACE_ALL_PERSIST);
+ assertTrue("theshold: "+threshold[0], threshold[0] == 10);
+ H5.H5Pset_file_space(fcpl_id, HDF5Constants.H5F_FILE_SPACE_VFD, 0);
+ H5.H5Pget_file_space(fcpl_id, strategy, threshold);
+ assertTrue("strategy: "+strategy[0], strategy[0] == HDF5Constants.H5F_FILE_SPACE_VFD);
+ assertTrue("theshold: "+threshold[0], threshold[0] == 10);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5P_file_space: " + err);
+ }
+ }
+}
diff --git a/java/test/TestH5PData.java b/java/test/TestH5PData.java
new file mode 100644
index 0000000..7fc154d
--- /dev/null
+++ b/java/test/TestH5PData.java
@@ -0,0 +1,170 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.text.DecimalFormat;
+import java.text.NumberFormat;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5PData {
+ @Rule public TestName testname = new TestName();
+
+ private static final String H5_FILE = "test.h5";
+ private static final int DIM_X = 12;
+ private static final int DIM_Y = 18;
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5did = -1;
+ long plist_id = -1;
+ long[] H5dims = { DIM_X, DIM_Y };
+ double windchillF[][] =
+ {{36.0, 31.0, 25.0, 19.0, 13.0, 7.0, 1.0, -5.0, -11.0, -16.0, -22.0, -28.0, -34.0, -40.0, -46.0, -52.0, -57.0, -63.0},
+ {34.0, 27.0, 21.0, 15.0, 9.0, 3.0, -4.0, -10.0, -16.0, -22.0, -28.0, -35.0, -41.0, -47.0, -53.0, -59.0, -66.0, -72.0},
+ {32.0, 25.0, 19.0, 13.0, 6.0, 0.0, -7.0, -13.0, -19.0, -26.0, -32.0, -39.0, -45.0, -51.0, -58.0, -64.0, -71.0, -77.0},
+ {30.0, 24.0, 17.0, 11.0, 4.0, -2.0, -9.0, -15.0, -22.0, -29.0, -35.0, -42.0, -48.0, -55.0, -61.0, -68.0, -74.0, -81.0},
+ {29.0, 23.0, 16.0, 9.0, 3.0, -4.0, -11.0, -17.0, -24.0, -31.0, -37.0, -44.0, -51.0, -58.0, -64.0, -71.0, -78.0, -84.0},
+ {28.0, 22.0, 15.0, 8.0, 1.0, -5.0, -12.0, -19.0, -26.0, -33.0, -39.0, -46.0, -53.0, -60.0, -67.0, -73.0, -80.0, -87.0},
+ {28.0, 21.0, 14.0, 7.0, 0.0, -7.0, -14.0, -21.0, -27.0, -34.0, -41.0, -48.0, -55.0, -62.0, -69.0, -76.0, -82.0, -89.0},
+ {27.0, 20.0, 13.0, 6.0, -1.0, -8.0, -15.0, -22.0, -29.0, -36.0, -43.0, -50.0, -57.0, -64.0, -71.0, -78.0, -84.0, -91.0},
+ {26.0, 19.0, 12.0, 5.0, -2.0, -9.0, -16.0, -23.0, -30.0, -37.0, -44.0, -51.0, -58.0, -65.0, -72.0, -79.0, -86.0, -93.0},
+ {26.0, 19.0, 12.0, 4.0, -3.0, -10.0, -17.0, -24.0, -31.0, -38.0, -45.0, -52.0, -60.0, -67.0, -74.0, -81.0, -88.0, -95.0},
+ {25.0, 18.0, 11.0, 4.0, -3.0, -11.0, -18.0, -25.0, -32.0, -39.0, -46.0, -54.0, -61.0, -68.0, -75.0, -82.0, -89.0, -97.0},
+ {25.0, 17.0, 10.0, 3.0, -4.0, -11.0, -19.0, -26.0, -33.0, -40.0, -48.0, -55.0, -62.0, -69.0, -76.0, -84.0, -91.0, -98.0}
+ };
+
+ private final void _deleteFile(String filename) {
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ private final long _createFloatDataset(long fid, long dsid, String name, long dapl) {
+ long did = -1;
+ try {
+ did = H5.H5Dcreate(fid, name, HDF5Constants.H5T_NATIVE_FLOAT, dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5PData._createFloatDataset: ", did > 0);
+
+ return did;
+ }
+
+ @Before
+ public void createH5file()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ try {
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ H5did = _createFloatDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+ plist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5PData.createH5file: " + err);
+ }
+ assertTrue("TestH5PData.createH5file: H5.H5Fcreate: ",H5fid > 0);
+ assertTrue("TestH5PData.createH5file: H5.H5Screate_simple: ",H5dsid > 0);
+ assertTrue("TestH5PData.createH5file: _createFloatDataset: ",H5did > 0);
+ assertTrue(plist_id > 0);
+
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+
+ _deleteFile(H5_FILE);
+
+ if (plist_id > 0)
+ try {H5.H5Pclose(plist_id);} catch (Exception ex) {}
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Pdata_transform() {
+ String f_to_c = "(5/9.0)*(x-32)";
+ double windchillFread[][] = new double[DIM_X][DIM_Y];
+ double windchillC;
+ NumberFormat formatter = new DecimalFormat("#0.000");
+
+ try {
+ H5.H5Pset_data_transform(plist_id, f_to_c);
+ H5.H5Dwrite(H5did, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ plist_id, windchillF);
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, windchillFread);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pdata_transform: " + err);
+ }
+ for(int row = 0; row < DIM_X; row++)
+ for(int col = 0; col < DIM_Y; col++) {
+ windchillC = (5/9.0)*(windchillF[row][col]-32);
+ String Cstr = formatter.format(windchillC);
+ String Fread = formatter.format(windchillFread[row][col]);
+ assertTrue("H5Pdata_transform: <"+row+","+col+">"+Fread+"="+Cstr, Fread.compareTo(Cstr)==0);
+ }
+ }
+
+ @Test
+ public void testH5P_buffer() {
+ long default_size = 0;
+ long size = 0;
+
+ try {
+ default_size = H5.H5Pget_buffer_size(plist_id);
+ H5.H5Pset_buffer_size(plist_id, DIM_X*DIM_Y);
+ size = H5.H5Pget_buffer_size(plist_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_buffer fail: " + err);
+ }
+ assertTrue("H5P_buffer default: "+default_size, default_size==1024*1024);
+ assertTrue("H5P_buffer size: "+size, size==DIM_X*DIM_Y);
+ }
+}
diff --git a/java/test/TestH5PL.java b/java/test/TestH5PL.java
new file mode 100644
index 0000000..afcb88a
--- /dev/null
+++ b/java/test/TestH5PL.java
@@ -0,0 +1,61 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5PL {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void checkOpenIDs() {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ }
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test
+ public void TestH5PLplugins() {
+ try {
+ int plugin_flags = H5.H5PLget_loading_state();
+ assertTrue("H5.H5PLget_loading_state: "+plugin_flags, plugin_flags == HDF5Constants.H5PL_ALL_PLUGIN);
+ int new_setting = plugin_flags & ~HDF5Constants.H5PL_FILTER_PLUGIN;
+ H5.H5PLset_loading_state (new_setting);
+ int changed_flags = H5.H5PLget_loading_state();
+ assertTrue("H5.H5PLget_loading_state: "+changed_flags, changed_flags == new_setting);
+ H5.H5PLset_loading_state (plugin_flags);
+ changed_flags = H5.H5PLget_loading_state();
+ assertTrue("H5.H5PLget_loading_state: "+changed_flags, changed_flags == HDF5Constants.H5PL_ALL_PLUGIN);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5PLplugins " + err);
+ }
+ }
+}
diff --git a/java/test/TestH5Pfapl.java b/java/test/TestH5Pfapl.java
new file mode 100644
index 0000000..555afe0
--- /dev/null
+++ b/java/test/TestH5Pfapl.java
@@ -0,0 +1,1325 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.text.DecimalFormat;
+import java.text.NumberFormat;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.structs.H5AC_cache_config_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Pfapl {
+ @Rule public TestName testname = new TestName();
+
+ private static final String H5_FILE = "test.h5";
+ private static final String H5_LOG_FILE = "test.log";
+ private static final String H5_FAMILY_FILE = "test%05d";
+ private static final String H5_MULTI_FILE = "testmulti";
+ private static char MULTI_LETTERS[] = {'X','s','b','r','g','l','o'};
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
+ private static final int DIMF_X = 12;
+ private static final int DIMF_Y = 18;
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5did = -1;
+ long H5Fdsid = -1;
+ long H5Fdid = -1;
+ long[] H5dims = { DIM_X, DIM_Y };
+ long fapl_id = -1;
+ long plapl_id = -1;
+ long dapl_id = -1;
+ long plist_id = -1;
+ long btplist_id = -1;
+ long[] H5Fdims = { DIMF_X, DIMF_Y };
+ double windchillF[][] =
+ {{36.0, 31.0, 25.0, 19.0, 13.0, 7.0, 1.0, -5.0, -11.0, -16.0, -22.0, -28.0, -34.0, -40.0, -46.0, -52.0, -57.0, -63.0},
+ {34.0, 27.0, 21.0, 15.0, 9.0, 3.0, -4.0, -10.0, -16.0, -22.0, -28.0, -35.0, -41.0, -47.0, -53.0, -59.0, -66.0, -72.0},
+ {32.0, 25.0, 19.0, 13.0, 6.0, 0.0, -7.0, -13.0, -19.0, -26.0, -32.0, -39.0, -45.0, -51.0, -58.0, -64.0, -71.0, -77.0},
+ {30.0, 24.0, 17.0, 11.0, 4.0, -2.0, -9.0, -15.0, -22.0, -29.0, -35.0, -42.0, -48.0, -55.0, -61.0, -68.0, -74.0, -81.0},
+ {29.0, 23.0, 16.0, 9.0, 3.0, -4.0, -11.0, -17.0, -24.0, -31.0, -37.0, -44.0, -51.0, -58.0, -64.0, -71.0, -78.0, -84.0},
+ {28.0, 22.0, 15.0, 8.0, 1.0, -5.0, -12.0, -19.0, -26.0, -33.0, -39.0, -46.0, -53.0, -60.0, -67.0, -73.0, -80.0, -87.0},
+ {28.0, 21.0, 14.0, 7.0, 0.0, -7.0, -14.0, -21.0, -27.0, -34.0, -41.0, -48.0, -55.0, -62.0, -69.0, -76.0, -82.0, -89.0},
+ {27.0, 20.0, 13.0, 6.0, -1.0, -8.0, -15.0, -22.0, -29.0, -36.0, -43.0, -50.0, -57.0, -64.0, -71.0, -78.0, -84.0, -91.0},
+ {26.0, 19.0, 12.0, 5.0, -2.0, -9.0, -16.0, -23.0, -30.0, -37.0, -44.0, -51.0, -58.0, -65.0, -72.0, -79.0, -86.0, -93.0},
+ {26.0, 19.0, 12.0, 4.0, -3.0, -10.0, -17.0, -24.0, -31.0, -38.0, -45.0, -52.0, -60.0, -67.0, -74.0, -81.0, -88.0, -95.0},
+ {25.0, 18.0, 11.0, 4.0, -3.0, -11.0, -18.0, -25.0, -32.0, -39.0, -46.0, -54.0, -61.0, -68.0, -75.0, -82.0, -89.0, -97.0},
+ {25.0, 17.0, 10.0, 3.0, -4.0, -11.0, -19.0, -26.0, -33.0, -40.0, -48.0, -55.0, -62.0, -69.0, -76.0, -84.0, -91.0, -98.0}
+ };
+
+ private final void _deleteFile(String filename) {
+ File file = null;
+ try {
+ file = new File(filename);
+ }
+ catch (Throwable err) {}
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ private final void _deleteLogFile() {
+ File file = null;
+ try {
+ file = new File(H5_LOG_FILE);
+ }
+ catch (Throwable err) {}
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ private final void _deleteFamilyFile() {
+ File file = null;
+ for(int indx = 0; ;indx++) {
+ java.text.DecimalFormat myFormat = new java.text.DecimalFormat("00000");
+ try {
+ file = new File("test"+myFormat.format(new Integer(indx))+".h5");
+ }
+ catch (Throwable err) {}
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ else
+ return;
+ }
+ }
+
+ private final void _deleteMultiFile() {
+ File file = null;
+ for(int indx = 1;indx<7;indx++) {
+ try {
+ file = new File(H5_MULTI_FILE+"-"+MULTI_LETTERS[indx]+".h5");
+ }
+ catch (Throwable err) {}
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+ }
+
+ private final long _createDataset(long fid, long dsid, String name, long dapl) {
+ long did = -1;
+ try {
+ did = H5.H5Dcreate(fid, name, HDF5Constants.H5T_STD_I32BE, dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5Pfapl._createDataset: ", did > 0);
+
+ return did;
+ }
+
+ private final void _createFloatDataset() {
+ try {
+ H5Fdsid = H5.H5Screate_simple(2, H5Fdims, null);
+ H5Fdid = H5.H5Dcreate(H5fid, "dsfloat", HDF5Constants.H5T_NATIVE_FLOAT, H5Fdsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5Pfapl._createFloatDataset: ", H5Fdid > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ private final void _createH5multiFileDS() {
+ try {
+ H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createH5file: " + err);
+ }
+ assertTrue("TestH5Pfapl.createH5file: _createDataset: ", H5did > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ private final void _createH5File(long fapl) {
+ try {
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, fapl);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createH5file: " + err);
+ }
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Fcreate: ", H5fid > 0);
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+ assertTrue("TestH5Pfapl.createH5file: _createDataset: ", H5did > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ private final void _createH5familyFile(long fapl) {
+ try {
+ H5fid = H5.H5Fcreate(H5_FAMILY_FILE+".h5", HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, fapl);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createH5file: " + err);
+ }
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Fcreate: ", H5fid > 0);
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+ assertTrue("TestH5Pfapl.createH5file: _createDataset: ", H5did > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ private final void _createH5multiFile(long fapl) {
+ try {
+ H5fid = H5.H5Fcreate(H5_MULTI_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, fapl);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createH5file: " + err);
+ }
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Fcreate: ", H5fid > 0);
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ public void deleteH5file() {
+ _deleteFile(H5_FILE);
+ }
+
+ public void deleteH5familyfile() {
+ _deleteFamilyFile();
+ }
+
+ public void deleteH5multifile() {
+ _deleteMultiFile();
+ }
+
+ @Before
+ public void createFileAccess()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ try {
+ fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(fapl_id > 0);
+ try {
+ plapl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(plapl_id > 0);
+ try {
+ plist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ btplist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ dapl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(plist_id > 0);
+ assertTrue(btplist_id > 0);
+ assertTrue(dapl_id > 0);
+ }
+
+ @After
+ public void deleteFileAccess() throws HDF5LibraryException {
+ if (fapl_id > 0)
+ try {H5.H5Pclose(fapl_id);} catch (Exception ex) {}
+ if (plapl_id > 0)
+ try {H5.H5Pclose(plapl_id);} catch (Exception ex) {}
+ if (dapl_id > 0)
+ try {H5.H5Pclose(dapl_id);} catch (Exception ex) {}
+ if (plist_id > 0)
+ try {H5.H5Pclose(plist_id);} catch (Exception ex) {}
+ if (btplist_id > 0)
+ try {H5.H5Pclose(btplist_id);} catch (Exception ex) {}
+
+ if (H5Fdsid > 0)
+ try {H5.H5Sclose(H5Fdsid);} catch (Exception ex) {}
+ if (H5Fdid > 0)
+ try {H5.H5Dclose(H5Fdid);} catch (Exception ex) {}
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Pget_libver_bounds() {
+ int ret_val = -1;
+ int[] libver = new int[2];
+
+ try {
+ ret_val = H5.H5Pget_libver_bounds(fapl_id, libver);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_libver_bounds: " + err);
+ }
+ assertTrue("testH5Pget_libver_bounds", ret_val >= 0);
+ // Check the Earliest Version of the library
+ assertEquals(HDF5Constants.H5F_LIBVER_EARLIEST, libver[0]);
+ // Check the Latest Version of the library
+ assertEquals(HDF5Constants.H5F_LIBVER_LATEST, libver[1]);
+ }
+
+ @Test
+ public void testH5Pset_libver_bounds() {
+
+ int ret_val = -1;
+ int low = HDF5Constants.H5F_LIBVER_EARLIEST;
+ int high = HDF5Constants.H5F_LIBVER_LATEST;
+ int[] libver = new int[2];
+
+ try {
+ ret_val = H5.H5Pset_libver_bounds(fapl_id, low, high);
+ H5.H5Pget_libver_bounds(fapl_id, libver);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_libver_bounds: " + err);
+ }
+ assertTrue("testH5Pset_libver_bounds", ret_val >= 0);
+ // Check the Earliest Version of the library
+ assertEquals(HDF5Constants.H5F_LIBVER_EARLIEST, libver[0]);
+ // Check the Latest Version of the library
+ assertEquals(HDF5Constants.H5F_LIBVER_LATEST, libver[1]);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pset_elink_fapl_NegativeID() throws Throwable {
+ H5.H5Pset_elink_fapl(-1, fapl_id );
+ }
+
+ @Test
+ public void testH5Pset_elink_fapl() {
+ int ret_val = -1;
+ try {
+ ret_val = H5.H5Pset_elink_fapl(plapl_id, fapl_id );
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_elink_fapl: " + err);
+ }
+ assertTrue("H5Pset_elink_fapl", ret_val >= 0);
+ }
+
+ @Test
+ public void testH5Pget_elink_fapl() {
+ long ret_val_id = -1;
+ try {
+ ret_val_id = H5.H5Pget_elink_fapl(plapl_id);
+ assertTrue("H5Pget_elink_fapl", ret_val_id >= 0);
+ assertEquals(HDF5Constants.H5P_DEFAULT, ret_val_id );
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_elink_fapl: " + err);
+ }
+ finally {
+ if (ret_val_id > 0)
+ try {H5.H5Pclose(ret_val_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5P_elink_fapl() {
+ long ret_val_id = -1;
+ try {
+ H5.H5Pset_elink_fapl(plapl_id, fapl_id );
+ ret_val_id = H5.H5Pget_elink_fapl(plapl_id);
+ assertTrue("H5P_elink_fapl", ret_val_id >= 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_elink_fapl: " + err);
+ }
+ finally {
+ if (ret_val_id > 0)
+ try {H5.H5Pclose(ret_val_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5P_elink_file_cache_size() {
+ long elink_fapl_id = -1;
+ int efc_size = 0;
+ try {
+ H5.H5Pset_elink_fapl(plapl_id, fapl_id );
+ elink_fapl_id = H5.H5Pget_elink_fapl(plapl_id);
+ assertTrue("H5P_elink_file_cache_size", elink_fapl_id >= 0);
+ try {
+ efc_size = H5.H5Pget_elink_file_cache_size(elink_fapl_id);
+ assertTrue("H5P_elink_file_cache_size default", efc_size == 0);
+ }
+ catch (UnsupportedOperationException err) {
+ System.out.println(err.getMessage());
+ }
+ try {
+ efc_size = 8;
+ H5.H5Pset_elink_file_cache_size(elink_fapl_id, efc_size);
+ efc_size = H5.H5Pget_elink_file_cache_size(elink_fapl_id);
+ assertTrue("H5P_elink_file_cache_size 8", efc_size == 8);
+ }
+ catch (UnsupportedOperationException err) {
+ System.out.println(err.getMessage());
+ }
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_elink_file_cache_size: " + err);
+ }
+ finally {
+ if (elink_fapl_id > 0)
+ try {H5.H5Pclose(elink_fapl_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5P_btree_ratios() {
+ double[] left = {0.1};
+ double[] middle = {0.5};
+ double[] right = {0.7};
+ try {
+ H5.H5Pset_btree_ratios(plist_id, left[0], middle[0], right[0]);
+ H5.H5Pget_btree_ratios(plist_id, left, middle, right);
+ assertTrue("H5P_btree_ratios", left[0] == 0.1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_btree_ratios: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_edc_check() {
+ int ret_val_id = -1;
+ try {
+ ret_val_id = H5.H5Pget_edc_check(plist_id);
+ assertTrue("H5P_edc_check", ret_val_id == HDF5Constants.H5Z_ENABLE_EDC);
+ H5.H5Pset_edc_check(plist_id, HDF5Constants.H5Z_DISABLE_EDC);
+ ret_val_id = H5.H5Pget_edc_check(plist_id);
+ assertTrue("H5P_edc_check", ret_val_id == HDF5Constants.H5Z_DISABLE_EDC);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_edc_check: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_fclose_degree() {
+ int ret_val_id = -1;
+ try {
+ ret_val_id = H5.H5Pget_fclose_degree(fapl_id);
+ assertTrue("H5Pget_fclose_degree default", ret_val_id == HDF5Constants.H5F_CLOSE_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_fclose_degree: default " + err);
+ }
+ try {
+ H5.H5Pset_fclose_degree(fapl_id, HDF5Constants.H5F_CLOSE_STRONG);
+ ret_val_id = H5.H5Pget_fclose_degree(fapl_id);
+ assertTrue("H5Pget_fclose_degree", ret_val_id == HDF5Constants.H5F_CLOSE_STRONG);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_fclose_degree: H5F_CLOSE_STRONG " + err);
+ }
+ try {
+ H5.H5Pset_fclose_degree(fapl_id, HDF5Constants.H5F_CLOSE_SEMI);
+ ret_val_id = H5.H5Pget_fclose_degree(fapl_id);
+ assertTrue("H5Pget_fclose_degree", ret_val_id == HDF5Constants.H5F_CLOSE_SEMI);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_fclose_degree: H5F_CLOSE_SEMI " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_alignment() {
+ long[] align = {0,0};
+ try {
+ H5.H5Pget_alignment(fapl_id, align);
+ assertTrue("H5P_alignment threshold default", align[0] == 1);
+ assertTrue("H5P_alignment alignment default", align[1] == 1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_alignment: default " + err);
+ }
+ try {
+ align[0] = 1024;
+ align[1] = 2048;
+ H5.H5Pset_alignment(fapl_id, align[0], align[1]);
+ H5.H5Pget_alignment(fapl_id, align);
+ assertTrue("H5P_alignment threshold", align[0] == 1024);
+ assertTrue("H5P_alignment alignment", align[1] == 2048);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_alignment: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_meta_block_size() {
+ long meta_size = 0;
+ try {
+ meta_size = H5.H5Pget_meta_block_size(fapl_id);
+ assertTrue("H5P_meta_block_size default", meta_size == 2048);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_meta_block_size: default " + err);
+ }
+ try {
+ meta_size = 4096;
+ H5.H5Pset_meta_block_size(fapl_id, meta_size);
+ meta_size = H5.H5Pget_meta_block_size(fapl_id);
+ assertTrue("H5P_meta_block_size 4096", meta_size == 4096);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_meta_block_size: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_small_data_block_size() {
+ long align = 0;
+ try {
+ align = H5.H5Pget_small_data_block_size(fapl_id);
+ assertTrue("H5P_small_data_block_size default", align == 2048);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_small_data_block_size: default " + err);
+ }
+ try {
+ align = 4096;
+ H5.H5Pset_small_data_block_size(fapl_id, align);
+ align = H5.H5Pget_small_data_block_size(fapl_id);
+ assertTrue("H5P_small_data_block_size 4096", align == 4096);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_small_data_block_size: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_hyper_vector_size() {
+ long[] align = {0};
+ try {
+ H5.H5Pget_hyper_vector_size(plist_id, align);
+ assertTrue("H5P_hyper_vector_size default", align[0] == 1024);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_hyper_vector_size: default " + err);
+ }
+ try {
+ align[0] = 4096;
+ H5.H5Pset_hyper_vector_size(plist_id, align[0]);
+ H5.H5Pget_hyper_vector_size(plist_id, align);
+ assertTrue("H5P_hyper_vector_size 4096", align[0] == 4096);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_hyper_vector_size: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_cache() {
+ long[] rdcc_nelmts = {0};
+ long[] rdcc_nbytes = {0};
+ double[] rdcc_w0 = {0};
+ try {
+ H5.H5Pget_cache(fapl_id, null, rdcc_nelmts, rdcc_nbytes, rdcc_w0);
+ assertTrue("H5P_cache default", rdcc_nelmts[0] == 521);
+ assertTrue("H5P_cache default", rdcc_nbytes[0] == (1024*1024));
+ assertTrue("H5P_cache default", rdcc_w0[0] == 0.75);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_cache: default " + err);
+ }
+ try {
+ rdcc_nelmts[0] = 4096;
+ H5.H5Pset_cache(fapl_id, 0, rdcc_nelmts[0], rdcc_nbytes[0], rdcc_w0[0]);
+ H5.H5Pget_cache(fapl_id, null, rdcc_nelmts, rdcc_nbytes, rdcc_w0);
+ assertTrue("H5P_cache 4096", rdcc_nelmts[0] == 4096);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_cache: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_chunk_cache() {
+ long[] rdcc_nslots = {0};
+ long[] rdcc_nbytes = {0};
+ double[] rdcc_w0 = {0};
+ try {
+ H5.H5Pget_chunk_cache(dapl_id, rdcc_nslots, rdcc_nbytes, rdcc_w0);
+ assertTrue("H5P_chunk_cache default", rdcc_nslots[0] == 521);
+ assertTrue("H5P_chunk_cache default", rdcc_nbytes[0] == (1024*1024));
+ assertTrue("H5P_chunk_cache default", rdcc_w0[0] == 0.75);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_chunk_cache: default " + err);
+ }
+ try {
+ rdcc_nslots[0] = 4096;
+ H5.H5Pset_chunk_cache(dapl_id, rdcc_nslots[0], rdcc_nbytes[0], rdcc_w0[0]);
+ H5.H5Pget_chunk_cache(dapl_id, rdcc_nslots, rdcc_nbytes, rdcc_w0);
+ assertTrue("H5P_chunk_cache 4096", rdcc_nslots[0] == 4096);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_chunk_cache: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_sieve_buf_size() {
+ long buf_size = 0;
+ try {
+ buf_size = H5.H5Pget_sieve_buf_size(fapl_id);
+ assertTrue("H5P_sieve_buf_size default", buf_size == (64*1024));
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_sieve_buf_size: default " + err);
+ }
+ try {
+ buf_size = 4096;
+ H5.H5Pset_sieve_buf_size(fapl_id, buf_size);
+ buf_size = H5.H5Pget_sieve_buf_size(fapl_id);
+ assertTrue("H5P_sieve_buf_size 4096", buf_size == 4096);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_sieve_buf_size: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_gc_references() {
+ boolean ret_val_id = false;
+ try {
+ H5.H5Pset_gc_references(fapl_id, true);
+ ret_val_id = H5.H5Pget_gc_references(fapl_id);
+ assertTrue("H5P_gc_references", ret_val_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5P_gc_references: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Pget_mdc_config() {
+ H5AC_cache_config_t cache_config = null;
+ try {
+ cache_config = H5.H5Pget_mdc_config(fapl_id);
+ assertTrue("H5Pget_mdc_config", cache_config.version==HDF5Constants.H5AC_CURR_CACHE_CONFIG_VERSION);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_mdc_config: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Pset_mdc_config() {
+ H5AC_cache_config_t cache_config = null;
+ try {
+ cache_config = H5.H5Pget_mdc_config(fapl_id);
+ assertTrue("H5Pset_mdc_config", cache_config.version==HDF5Constants.H5AC_CURR_CACHE_CONFIG_VERSION);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_mdc_config: " + err);
+ }
+ try {
+ cache_config.decr_mode = HDF5Constants.H5C_decr_off;
+ H5.H5Pset_mdc_config(fapl_id, cache_config);
+ cache_config = H5.H5Pget_mdc_config(fapl_id);
+ assertTrue("H5Pset_mdc_config", cache_config.version==HDF5Constants.H5AC_CURR_CACHE_CONFIG_VERSION);
+ assertTrue("H5Pset_mdc_config", cache_config.decr_mode==HDF5Constants.H5C_decr_off);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_mdc_config: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_fapl_core() {
+ if (HDF5Constants.H5FD_CORE < 0)
+ return;
+ try {
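+ // Core (in-memory) driver: 4 KiB allocation increment, no backing store written to disk.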
+ H5.H5Pset_fapl_core(fapl_id, 4096, false);
+ long driver_type = H5.H5Pget_driver(fapl_id);
+ assertTrue("H5Pget_driver: core = "+ driver_type, HDF5Constants.H5FD_CORE==driver_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_fapl_core: " + err);
+ }
+ try {
+ long[] increment = {-1};
+ boolean[] backingstore = {true};
+ H5.H5Pget_fapl_core(fapl_id, increment, backingstore);
+ assertTrue("H5Pget_fapl_core: increment="+increment[0], increment[0]==4096);
+ assertTrue("H5Pget_fapl_core: backingstore="+backingstore[0], !backingstore[0]);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_fapl_core: " + err);
+ }
+ }
+
+ @Test
+ public void testH5P_fapl_family() {
+ if (HDF5Constants.H5FD_FAMILY < 0)
+ return;
+ try {
+ H5.H5Pset_fapl_family(fapl_id, 1024, HDF5Constants.H5P_DEFAULT);
+ long driver_type = H5.H5Pget_driver(fapl_id);
+ assertTrue("H5Pget_driver: family = "+ driver_type, HDF5Constants.H5FD_FAMILY==driver_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_fapl_family: " + err);
+ }
+ try {
+ long[] member_size = {0};
+ long[] member_fapl = {-1};
+ H5.H5Pget_fapl_family(fapl_id, member_size, member_fapl);
+ assertTrue("H5Pget_fapl_family: member_size="+member_size[0], member_size[0]==1024);
+ assertTrue("H5Pget_fapl_family: member_fapl ", H5.H5P_equal(member_fapl[0], HDF5Constants.H5P_FILE_ACCESS_DEFAULT));
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_fapl_family: " + err);
+ }
+ _createH5familyFile(fapl_id);
+ deleteH5familyfile();
+ }
+
+ @Test
+ public void testH5P_family_offset() {
+ if (HDF5Constants.H5FD_FAMILY < 0)
+ return;
+ try {
+ H5.H5Pset_fapl_family(fapl_id, 1024, HDF5Constants.H5P_DEFAULT);
+ long driver_type = H5.H5Pget_driver(fapl_id);
+ assertTrue("H5Pget_driver: family = "+ driver_type, HDF5Constants.H5FD_FAMILY==driver_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_fapl_family: " + err);
+ }
+ _createH5familyFile(fapl_id);
+ long family_offset = 512;
+ try {
+ H5.H5Pset_family_offset(fapl_id, family_offset);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_fapl_family: " + err);
+ }
+ try {
+ long offset = H5.H5Pget_family_offset(fapl_id);
+ assertTrue("H5Pget_fapl_family: offset="+offset, offset==family_offset);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_fapl_family: " + err);
+ }
+ deleteH5familyfile();
+ }
+
+ @Test
+ public void testH5Pset_fapl_sec2() {
+ if (HDF5Constants.H5FD_SEC2 < 0)
+ return;
+ try {
+ H5.H5Pset_fapl_sec2(fapl_id);
+ long driver_type = H5.H5Pget_driver(fapl_id);
+ assertTrue("H5Pget_driver: sec2 = "+ driver_type, HDF5Constants.H5FD_SEC2==driver_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_fapl_sec2: " + err);
+ }
+ _createH5File(fapl_id);
+ deleteH5file();
+ }
+
+ @Test
+ public void testH5Pset_fapl_stdio() {
+ if (HDF5Constants.H5FD_STDIO < 0)
+ return;
+ try {
+ H5.H5Pset_fapl_stdio(fapl_id);
+ long driver_type = H5.H5Pget_driver(fapl_id);
+ assertTrue("H5Pget_driver: stdio = "+ driver_type, HDF5Constants.H5FD_STDIO==driver_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_fapl_stdio: " + err);
+ }
+ _createH5File(fapl_id);
+ deleteH5file();
+ }
+
+ @Test
+ public void testH5Pset_fapl_log() {
+ if (HDF5Constants.H5FD_LOG < 0)
+ return;
+ try {
+ long log_flags = HDF5Constants.H5FD_LOG_LOC_IO;
+ H5.H5Pset_fapl_log(fapl_id, H5_LOG_FILE, log_flags, 1024);
+ long driver_type = H5.H5Pget_driver(fapl_id);
+ assertTrue("H5Pget_driver: log = "+ driver_type, HDF5Constants.H5FD_LOG==driver_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_fapl_log: " + err);
+ }
+ _createH5File(fapl_id);
+ deleteH5file();
+ _deleteLogFile();
+ }
+
+ @Test
+ public void testH5P_fapl_multi_nulls() {
+ if (HDF5Constants.H5FD_MULTI < 0)
+ return;
+
+ int[] member_map = null;
+ long[] member_fapl = null;
+ String[] member_name = null;
+ long[] member_addr = null;
+
+ try {
+ H5.H5Pset_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr, true);
+ long driver_type = H5.H5Pget_driver(fapl_id);
+ assertTrue("H5Pget_driver: muti = "+ driver_type, HDF5Constants.H5FD_MULTI==driver_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_fapl_muti: " + err);
+ }
+ try {
+ boolean relax = H5.H5Pget_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr);
+ assertTrue("H5Pget_fapl_muti: relax ", relax);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_fapl_muti: " + err);
+ }
+ _createH5multiFile(fapl_id);
+ deleteH5multifile();
+ }
+
+ @Test
+ public void testH5P_fapl_multi_defaults() {
+ if (HDF5Constants.H5FD_MULTI < 0)
+ return;
+ int H5FD_MEM_NTYPES = HDF5Constants.H5FD_MEM_NTYPES; // 7
+
+ long sH5FD_MEM_DEFAULT_HADDR = HDF5Constants.H5FD_DEFAULT_HADDR_SIZE;
+ long sH5FD_MEM_HADDR = HDF5Constants.H5FD_MEM_DEFAULT_SIZE;
+ long sH5FD_MEM_SUPER_HADDR = HDF5Constants.H5FD_MEM_DEFAULT_SUPER_SIZE;
+ long sH5FD_MEM_BTREE_HADDR = HDF5Constants.H5FD_MEM_DEFAULT_BTREE_SIZE;
+ long sH5FD_MEM_DRAW_HADDR = HDF5Constants.H5FD_MEM_DEFAULT_DRAW_SIZE;
+ long sH5FD_MEM_GHEAP_HADDR = HDF5Constants.H5FD_MEM_DEFAULT_GHEAP_SIZE;
+ long sH5FD_MEM_LHEAP_HADDR = HDF5Constants.H5FD_MEM_DEFAULT_LHEAP_SIZE;
+ long sH5FD_MEM_OHDR_HADDR = HDF5Constants.H5FD_MEM_DEFAULT_OHDR_SIZE;
+ int[] member_map = null;
+ long[] member_fapl = null;
+ String[] member_name = null;
+ long[] member_addr = null;
+
+ try {
+ H5.H5Pset_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr, true);
+ long driver_type = H5.H5Pget_driver(fapl_id);
+ assertTrue("H5Pget_driver: muti = "+ driver_type, HDF5Constants.H5FD_MULTI==driver_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_fapl_muti: " + err);
+ }
+ try {
+ member_map = new int[HDF5Constants.H5FD_MEM_NTYPES];
+ member_fapl = new long[HDF5Constants.H5FD_MEM_NTYPES];
+ member_name = new String[HDF5Constants.H5FD_MEM_NTYPES];
+ member_addr = new long[HDF5Constants.H5FD_MEM_NTYPES];
+ boolean relax = H5.H5Pget_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr);
+ assertTrue("H5Pget_fapl_muti: relax ", relax);
+ assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_DEFAULT], member_map[HDF5Constants.H5FD_MEM_DEFAULT] == HDF5Constants.H5FD_MEM_DEFAULT);
+ assertTrue("H5Pget_fapl_muti: member_fapl ", H5.H5P_equal(member_fapl[HDF5Constants.H5FD_MEM_DEFAULT], HDF5Constants.H5P_FILE_ACCESS_DEFAULT));
+ assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_DEFAULT], member_name[HDF5Constants.H5FD_MEM_DEFAULT].compareTo("%s-X.h5")==0);
+ assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_SUPER], member_name[HDF5Constants.H5FD_MEM_SUPER].compareTo("%s-s.h5")==0);
+ assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_BTREE], member_name[HDF5Constants.H5FD_MEM_BTREE].compareTo("%s-b.h5")==0);
+ assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_DRAW], member_name[HDF5Constants.H5FD_MEM_DRAW].compareTo("%s-r.h5")==0);
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_DEFAULT], member_addr[HDF5Constants.H5FD_MEM_DEFAULT] == sH5FD_MEM_HADDR);
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_SUPER], member_addr[HDF5Constants.H5FD_MEM_SUPER] == sH5FD_MEM_SUPER_HADDR);
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_BTREE], member_addr[HDF5Constants.H5FD_MEM_BTREE] == sH5FD_MEM_BTREE_HADDR);
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_DRAW], member_addr[HDF5Constants.H5FD_MEM_DRAW] == sH5FD_MEM_DRAW_HADDR);
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_GHEAP], member_addr[HDF5Constants.H5FD_MEM_GHEAP] == sH5FD_MEM_GHEAP_HADDR);
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_LHEAP], member_addr[HDF5Constants.H5FD_MEM_LHEAP] == sH5FD_MEM_LHEAP_HADDR);
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_OHDR], member_addr[HDF5Constants.H5FD_MEM_OHDR] == sH5FD_MEM_OHDR_HADDR);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_fapl_muti: " + err);
+ }
+ _createH5multiFile(fapl_id);
+ _createH5multiFileDS();
+ deleteH5multifile();
+ }
+
+ @Test
+ public void testH5P_fapl_multi() {
+ if (HDF5Constants.H5FD_MULTI < 0)
+ return;
+ long HADDR_DEFAULT_SIZE = HDF5Constants.H5FD_DEFAULT_HADDR_SIZE;
+ int[] member_map = new int[HDF5Constants.H5FD_MEM_NTYPES];
+ long[] member_fapl = new long[HDF5Constants.H5FD_MEM_NTYPES];
+ String[] member_name = new String[HDF5Constants.H5FD_MEM_NTYPES];
+ long[] member_addr = new long[HDF5Constants.H5FD_MEM_NTYPES];
+
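+ // Map every memory usage type to the superblock member by default, then give raw data, b-tree and global
+ // heap blocks their own members; the member addresses below partition the default address space into quarters.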
+ for(int mt=HDF5Constants.H5FD_MEM_DEFAULT; mt<HDF5Constants.H5FD_MEM_NTYPES; mt++) {
+ member_fapl[mt] = HDF5Constants.H5P_DEFAULT;
+ member_map[mt] = HDF5Constants.H5FD_MEM_SUPER;
+ }
+ member_map[HDF5Constants.H5FD_MEM_DRAW] = HDF5Constants.H5FD_MEM_DRAW;
+ member_map[HDF5Constants.H5FD_MEM_BTREE] = HDF5Constants.H5FD_MEM_BTREE;
+ member_map[HDF5Constants.H5FD_MEM_GHEAP] = HDF5Constants.H5FD_MEM_GHEAP;
+
+ member_name[HDF5Constants.H5FD_MEM_SUPER] = "%s-super.h5";
+ member_addr[HDF5Constants.H5FD_MEM_SUPER] = 0;
+
+ member_name[HDF5Constants.H5FD_MEM_BTREE] = "%s-btree.h5";
+ member_addr[HDF5Constants.H5FD_MEM_BTREE] = HADDR_DEFAULT_SIZE/4;
+
+ member_name[HDF5Constants.H5FD_MEM_DRAW] = "%s-draw.h5";
+ member_addr[HDF5Constants.H5FD_MEM_DRAW] = HADDR_DEFAULT_SIZE/2;
+
+ member_name[HDF5Constants.H5FD_MEM_GHEAP] = "%s-gheap.h5";
+ member_addr[HDF5Constants.H5FD_MEM_GHEAP] = (HADDR_DEFAULT_SIZE/4)*3;
+
+ try {
+ H5.H5Pset_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr, true);
+ long driver_type = H5.H5Pget_driver(fapl_id);
+ assertTrue("H5Pget_driver: muti = "+ driver_type, HDF5Constants.H5FD_MULTI==driver_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_fapl_muti: " + err);
+ }
+ try {
+ boolean relax = H5.H5Pget_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr);
+ assertTrue("H5Pget_fapl_muti: relax ", relax);
+ assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_DEFAULT], member_map[HDF5Constants.H5FD_MEM_DEFAULT] == HDF5Constants.H5FD_MEM_SUPER);
+ assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_DRAW], member_map[HDF5Constants.H5FD_MEM_DRAW] == HDF5Constants.H5FD_MEM_DRAW);
+ assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_BTREE], member_map[HDF5Constants.H5FD_MEM_BTREE] == HDF5Constants.H5FD_MEM_BTREE);
+ assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_GHEAP], member_map[HDF5Constants.H5FD_MEM_GHEAP] == HDF5Constants.H5FD_MEM_GHEAP);
+
+ assertTrue("H5Pget_fapl_muti: member_fapl ", H5.H5P_equal(member_fapl[HDF5Constants.H5FD_MEM_DEFAULT], HDF5Constants.H5P_FILE_ACCESS_DEFAULT));
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_DEFAULT], member_addr[HDF5Constants.H5FD_MEM_DEFAULT] == 0);
+ assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_SUPER], member_name[HDF5Constants.H5FD_MEM_SUPER].compareTo("%s-super.h5")==0);
+
+ assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_BTREE], member_name[HDF5Constants.H5FD_MEM_BTREE].compareTo("%s-btree.h5")==0);
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_BTREE], member_addr[HDF5Constants.H5FD_MEM_BTREE] == HADDR_DEFAULT_SIZE/4);
+
+ assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_DRAW], member_name[HDF5Constants.H5FD_MEM_DRAW].compareTo("%s-draw.h5")==0);
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_DRAW], member_addr[HDF5Constants.H5FD_MEM_DRAW] == HADDR_DEFAULT_SIZE/2);
+
+ assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_GHEAP], member_name[HDF5Constants.H5FD_MEM_GHEAP].compareTo("%s-gheap.h5")==0);
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_GHEAP], member_addr[HDF5Constants.H5FD_MEM_GHEAP] == (HADDR_DEFAULT_SIZE/4)*3);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_fapl_muti: " + err);
+ }
+ _createH5multiFile(fapl_id);
+ try {
+ long file_size = H5.H5Fget_filesize(H5fid);
+ assertTrue("H5Pget_fapl_muti: file_size ", file_size >= HADDR_DEFAULT_SIZE/4 || file_size <= HADDR_DEFAULT_SIZE/2);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_fapl_muti:H5Fget_filesize " + err);
+ }
+ _createH5multiFileDS();
+ deleteH5multifile();
+ File file = new File(H5_MULTI_FILE+"-super.h5");
+ if (file.exists()) {
+ try {
+ file.delete();
+ }
+ catch (SecurityException e) {
+ ;// e.printStackTrace();
+ }
+ }
+ file = new File(H5_MULTI_FILE+"-btree.h5");
+ if (file.exists()) {
+ try {
+ file.delete();
+ }
+ catch (SecurityException e) {
+ ;// e.printStackTrace();
+ }
+ }
+ file = new File(H5_MULTI_FILE+"-draw.h5");
+ if (file.exists()) {
+ try {
+ file.delete();
+ }
+ catch (SecurityException e) {
+ ;// e.printStackTrace();
+ }
+ }
+ file = new File(H5_MULTI_FILE+"-gheap.h5");
+ if (file.exists()) {
+ try {
+ file.delete();
+ }
+ catch (SecurityException e) {
+ ;// e.printStackTrace();
+ }
+ }
+ }
+
+ @Test
+ public void testH5P_fapl_split() {
+ if (HDF5Constants.H5FD_MULTI < 0)
+ return;
+
+ try {
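+ // The split driver is layered on the multi driver: metadata goes to the "-meta.h5" member and raw data
+ // to the "-raw.h5" member, which is what the H5Pget_fapl_multi checks below verify.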
+ H5.H5Pset_fapl_split(fapl_id, "-meta.h5", HDF5Constants.H5P_DEFAULT, "-raw.h5", HDF5Constants.H5P_DEFAULT);
+ long driver_type = H5.H5Pget_driver(fapl_id);
+ assertTrue("H5Pget_driver: split = "+ driver_type, HDF5Constants.H5FD_MULTI==driver_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_fapl_split: " + err);
+ }
+ try {
+ int[] member_map = new int[HDF5Constants.H5FD_MEM_NTYPES];
+ long[] member_fapl = new long[HDF5Constants.H5FD_MEM_NTYPES];
+ String[] member_name = new String[HDF5Constants.H5FD_MEM_NTYPES];
+ long[] member_addr = new long[HDF5Constants.H5FD_MEM_NTYPES];
+ boolean relax = H5.H5Pget_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr);
+ assertTrue("H5Pget_fapl_multi: relax ", relax);
+ assertTrue("H5Pget_fapl_multi: member_name="+member_name[HDF5Constants.H5FD_MEM_SUPER], member_name[HDF5Constants.H5FD_MEM_SUPER].compareTo("%s-meta.h5")==0);
+ assertTrue("H5Pget_fapl_multi: member_name="+member_name[HDF5Constants.H5FD_MEM_DRAW], member_name[HDF5Constants.H5FD_MEM_DRAW].compareTo("%s-raw.h5")==0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_fapl_split: " + err);
+ }
+ _createH5multiFile(fapl_id);
+ deleteH5multifile();
+ File file = new File(H5_MULTI_FILE+"-meta.h5");
+ if (file.exists()) {
+ try {
+ file.delete();
+ }
+ catch (SecurityException e) {
+ ;// e.printStackTrace();
+ }
+ }
+ file = new File(H5_MULTI_FILE+"-raw.h5");
+ if (file.exists()) {
+ try {
+ file.delete();
+ }
+ catch (SecurityException e) {
+ ;// e.printStackTrace();
+ }
+ }
+ }
+
+ @Test
+ public void testH5P_fapl_direct() {
+ if (HDF5Constants.H5FD_DIRECT < 0)
+ return;
+ try {
+ H5.H5Pset_fapl_direct(fapl_id, 1024, 4096, 8*4096);
+ long driver_type = H5.H5Pget_driver(fapl_id);
+ assertTrue("H5Pget_driver: direct = "+ driver_type, HDF5Constants.H5FD_DIRECT==driver_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_fapl_direct: " + err);
+ }
+ try {
+ long[] params = {-1, -1, -1};
+ H5.H5Pget_fapl_direct(fapl_id, params);
+ assertTrue("H5Pget_fapl_direct: alignment="+params[0], params[0]==1024);
+ assertTrue("H5Pget_fapl_direct: block_size="+params[1], params[1]==4096);
+ assertTrue("H5Pget_fapl_direct: cbuf_size="+params[2], params[2]==8*4096);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_fapl_direct: " + err);
+ }
+ _createH5File(fapl_id);
+ deleteH5file();
+ }
+
+ @Test
+ public void testH5Pset_fapl_windows() {
+ if (HDF5Constants.H5FD_WINDOWS < 0)
+ return;
+ try {
+ H5.H5Pset_fapl_windows(fapl_id);
+ long driver_type = H5.H5Pget_driver(fapl_id);
+ assertTrue("H5Pget_driver: windows = "+ driver_type, HDF5Constants.H5FD_WINDOWS==driver_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_fapl_windows: " + err);
+ }
+ _createH5File(fapl_id);
+ deleteH5file();
+ }
+
+ @Test
+ public void testH5Pmulti_transform() {
+ if (HDF5Constants.H5FD_MULTI < 0)
+ return;
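+ // Data transform expression converting Fahrenheit to Celsius; it is applied when windchillF is written with plist_id below.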
+ String f_to_c = "(5/9.0)*(x-32)";
+ double[][] windchillFread = new double[DIMF_X][DIMF_Y];
+ double windchillC;
+ NumberFormat formatter = new DecimalFormat("#0.000");
+ long HADDRMAX = HDF5Constants.H5FD_DEFAULT_HADDR_SIZE;
+
+ int[] member_map = new int[HDF5Constants.H5FD_MEM_NTYPES];
+ long[] member_fapl = new long[HDF5Constants.H5FD_MEM_NTYPES];
+ String[] member_name = new String[HDF5Constants.H5FD_MEM_NTYPES];
+ long[] member_addr = new long[HDF5Constants.H5FD_MEM_NTYPES];
+
+ try {
+ H5.H5Pset_data_transform(plist_id, f_to_c);
+ H5.H5Pset_btree_ratios(btplist_id, 0.1, 0.5, 0.7);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pdata_transform: " + err);
+ }
+
+ for(int mt=HDF5Constants.H5FD_MEM_DEFAULT; mt<HDF5Constants.H5FD_MEM_NTYPES; mt++) {
+ member_fapl[mt] = HDF5Constants.H5P_DEFAULT;
+ member_map[mt] = HDF5Constants.H5FD_MEM_SUPER;
+ }
+ member_map[HDF5Constants.H5FD_MEM_DRAW] = HDF5Constants.H5FD_MEM_DRAW;
+ member_map[HDF5Constants.H5FD_MEM_BTREE] = HDF5Constants.H5FD_MEM_BTREE;
+ member_map[HDF5Constants.H5FD_MEM_GHEAP] = HDF5Constants.H5FD_MEM_GHEAP;
+
+ member_name[HDF5Constants.H5FD_MEM_SUPER] = "%s-super.h5";
+ member_addr[HDF5Constants.H5FD_MEM_SUPER] = 0;
+
+ member_name[HDF5Constants.H5FD_MEM_BTREE] = "%s-btree.h5";
+ member_addr[HDF5Constants.H5FD_MEM_BTREE] = HADDRMAX/4;
+
+ member_name[HDF5Constants.H5FD_MEM_DRAW] = "%s-draw.h5";
+ member_addr[HDF5Constants.H5FD_MEM_DRAW] = HADDRMAX/2;
+
+ member_name[HDF5Constants.H5FD_MEM_GHEAP] = "%s-gheap.h5";
+ member_addr[HDF5Constants.H5FD_MEM_GHEAP] = (HADDRMAX/4)*3;
+
+ try {
+ H5.H5Pset_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr, true);
+ long driver_type = H5.H5Pget_driver(fapl_id);
+ assertTrue("H5Pget_driver: muti = "+ driver_type, HDF5Constants.H5FD_MULTI==driver_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_fapl_muti: " + err);
+ }
+ try {
+ boolean relax = H5.H5Pget_fapl_multi(fapl_id, member_map, member_fapl, member_name, member_addr);
+ assertTrue("H5Pget_fapl_muti: relax ", relax);
+ assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_DEFAULT], member_map[HDF5Constants.H5FD_MEM_DEFAULT] == HDF5Constants.H5FD_MEM_SUPER);
+ assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_DRAW], member_map[HDF5Constants.H5FD_MEM_DRAW] == HDF5Constants.H5FD_MEM_DRAW);
+ assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_BTREE], member_map[HDF5Constants.H5FD_MEM_BTREE] == HDF5Constants.H5FD_MEM_BTREE);
+ assertTrue("H5Pget_fapl_muti: member_map="+member_map[HDF5Constants.H5FD_MEM_GHEAP], member_map[HDF5Constants.H5FD_MEM_GHEAP] == HDF5Constants.H5FD_MEM_GHEAP);
+
+ assertTrue("H5Pget_fapl_muti: member_fapl ", H5.H5P_equal(member_fapl[HDF5Constants.H5FD_MEM_DEFAULT], HDF5Constants.H5P_FILE_ACCESS_DEFAULT));
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_DEFAULT], member_addr[HDF5Constants.H5FD_MEM_DEFAULT] == 0);
+ assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_SUPER], member_name[HDF5Constants.H5FD_MEM_SUPER].compareTo("%s-super.h5")==0);
+
+ assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_BTREE], member_name[HDF5Constants.H5FD_MEM_BTREE].compareTo("%s-btree.h5")==0);
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_BTREE], member_addr[HDF5Constants.H5FD_MEM_BTREE] == HADDRMAX/4);
+
+ assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_DRAW], member_name[HDF5Constants.H5FD_MEM_DRAW].compareTo("%s-draw.h5")==0);
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_DRAW], member_addr[HDF5Constants.H5FD_MEM_DRAW] == HADDRMAX/2);
+
+ assertTrue("H5Pget_fapl_muti: member_name="+member_name[HDF5Constants.H5FD_MEM_GHEAP], member_name[HDF5Constants.H5FD_MEM_GHEAP].compareTo("%s-gheap.h5")==0);
+ assertTrue("H5Pget_fapl_muti: member_addr="+member_addr[HDF5Constants.H5FD_MEM_GHEAP], member_addr[HDF5Constants.H5FD_MEM_GHEAP] == (HADDRMAX/4)*3);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_fapl_muti: " + err);
+ }
+ try {
+ _createH5multiFile(fapl_id);
+ long file_size = H5.H5Fget_filesize(H5fid);
+ assertTrue("H5Pget_fapl_muti: file_size ", file_size >= HADDRMAX/4 || file_size <= HADDRMAX/2);
+ _createH5multiFileDS();
+ _createFloatDataset();
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pdata_transform: " + err);
+ }
+ try {
+ H5.H5Dwrite(H5Fdid, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ plist_id, windchillF);
+ H5.H5Dread(H5Fdid, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, windchillFread);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pdata_transform: " + err);
+ }
+ for(int row = 0; row < DIMF_X; row++) {
+ for(int col = 0; col < DIMF_Y; col++) {
+ windchillC = (5/9.0)*(windchillF[row][col]-32);
+ String Cstr = formatter.format(windchillC);
+ String Fread = formatter.format(windchillFread[row][col]);
+ assertTrue("H5Pdata_transform: <"+row+","+col+">"+Fread+"="+Cstr, Fread.compareTo(Cstr)==0);
+ }
+ }
+ deleteH5multifile();
+ File file = new File(H5_MULTI_FILE+"-super.h5");
+ if (file.exists()) {
+ try {
+ file.delete();
+ }
+ catch (SecurityException e) {
+ ;// e.printStackTrace();
+ }
+ }
+ file = new File(H5_MULTI_FILE+"-btree.h5");
+ if (file.exists()) {
+ try {
+ file.delete();
+ }
+ catch (SecurityException e) {
+ ;// e.printStackTrace();
+ }
+ }
+ file = new File(H5_MULTI_FILE+"-draw.h5");
+ if (file.exists()) {
+ try {
+ file.delete();
+ }
+ catch (SecurityException e) {
+ ;// e.printStackTrace();
+ }
+ }
+ file = new File(H5_MULTI_FILE+"-gheap.h5");
+ if (file.exists()) {
+ try {
+ file.delete();
+ }
+ catch (SecurityException e) {
+ ;// e.printStackTrace();
+ }
+ }
+ }
+}
diff --git a/java/test/TestH5Plist.java b/java/test/TestH5Plist.java
new file mode 100644
index 0000000..024237a
--- /dev/null
+++ b/java/test/TestH5Plist.java
@@ -0,0 +1,1013 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.ArrayList;
+import java.io.File;
+import java.text.DecimalFormat;
+import java.text.NumberFormat;
+import java.nio.charset.StandardCharsets;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.HDFNativeData;
+import hdf.hdf5lib.callbacks.H5P_cls_close_func_cb;
+import hdf.hdf5lib.callbacks.H5P_cls_close_func_t;
+import hdf.hdf5lib.callbacks.H5P_cls_copy_func_cb;
+import hdf.hdf5lib.callbacks.H5P_cls_copy_func_t;
+import hdf.hdf5lib.callbacks.H5P_cls_create_func_cb;
+import hdf.hdf5lib.callbacks.H5P_cls_create_func_t;
+import hdf.hdf5lib.callbacks.H5P_prp_set_func_cb;
+import hdf.hdf5lib.callbacks.H5P_prp_get_func_cb;
+import hdf.hdf5lib.callbacks.H5P_prp_delete_func_cb;
+import hdf.hdf5lib.callbacks.H5P_prp_copy_func_cb;
+import hdf.hdf5lib.callbacks.H5P_prp_compare_func_cb;
+import hdf.hdf5lib.callbacks.H5P_prp_close_func_cb;
+import hdf.hdf5lib.callbacks.H5P_prp_create_func_cb;
+import hdf.hdf5lib.callbacks.H5P_iterate_cb;
+import hdf.hdf5lib.callbacks.H5P_iterate_t;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.structs.H5AC_cache_config_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Plist {
+ @Rule public TestName testname = new TestName();
+
+ // Property definitions
+ private static final String CLASS1_NAME = "Class 1";
+ private static final String CLASS1_PATH = "root/Class 1";
+
+ private static final String CLASS2_NAME = "Class 2";
+ private static final String CLASS2_PATH = "root/Class 1/Class 2";
+
+ // Property definitions
+ private static final String PROP1_NAME = "Property 1";
+ private static final int prop1_def = 10; // Property 1 default value
+ private static final int PROP1_SIZE = 2;
+
+ private static final String PROP2_NAME = "Property 2";
+ private static final float prop2_def = 3.14F; // Property 2 default value
+ private static final int PROP2_SIZE = 8;
+
+ private static final String PROP3_NAME = "Property 3";
+ private static final char[] prop3_def = {'T','e','n',' ','c','h','a','r','s',' '}; // Property 3 default value
+ private static final int PROP3_SIZE = 10;
+
+ private static final String PROP4_NAME = "Property 4";
+ private static final double prop4_def = 1.41; // Property 4 default value
+ private static final int PROP4_SIZE = 8;
+
+ private static final String [] pnames = { // Names of properties for iterator
+ PROP1_NAME,
+ PROP2_NAME,
+ PROP3_NAME,
+ PROP4_NAME};
+
+ long plist_class_id = -1;
+
+ @Before
+ public void createPropClass()throws NullPointerException, HDF5Exception
+ {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ // Create a new generic class, derived from the root of the class hierarchy
+ try {
+ plist_class_id = H5.H5Pcreate_class_nocb(HDF5Constants.H5P_ROOT, CLASS1_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Plist.H5Pcreate_class: " + err);
+ }
+ assertTrue(plist_class_id > 0);
+ }
+
+ @After
+ public void deletePropClass() throws HDF5LibraryException {
+ if (plist_class_id > 0)
+ try {H5.H5Pclose(plist_class_id);} catch (Exception ex) {}
+ System.out.println();
+ }
+
+ // Test basic generic property list code. Tests creating new generic classes.
+ @Test
+ public void testH5P_genprop_basic_class() {
+ int status = -1;
+ long cid1 = -1; // Generic Property class ID
+ long cid2 = -1; // Generic Property class ID
+ long cid3 = -1; // Generic Property class ID
+ String name = null; // Name of class
+
+ try {
+ // Check class name
+ try {
+ name = H5.H5Pget_class_name(plist_class_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_class_name plist_class_id: " + err);
+ }
+ assertTrue("Class names don't match!, "+name+"="+CLASS1_NAME+"\n", name.compareTo(CLASS1_NAME)==0);
+
+ // Check class parent
+ try {
+ cid2 = H5.H5Pget_class_parent(plist_class_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_class_parent cid2: " + err);
+ }
+
+ // Verify class parent correct
+ try {
+ status = H5.H5Pequal(cid2, HDF5Constants.H5P_ROOT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pequal cid2: " + err);
+ }
+ assertTrue("H5Pequal cid2", status >= 0);
+
+ // Make certain false positives aren't being returned
+ try {
+ status = H5.H5Pequal(cid2, HDF5Constants.H5P_FILE_CREATE);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pequal cid2: " + err);
+ }
+ assertTrue("H5Pequal cid2", status >= 0);
+
+ // Close parent class
+ try {
+ H5.H5Pclose_class(cid2);
+ cid2 = -1;
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pclose_class cid2: " + err);
+ }
+
+ // Close class
+ try {
+ H5.H5Pclose_class(plist_class_id);
+ plist_class_id = -1;
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pclose_class plist_class_id: " + err);
+ }
+
+ // Create another new generic class, derived from file creation class
+ try {
+ cid1 = H5.H5Pcreate_class_nocb(HDF5Constants.H5P_FILE_CREATE, CLASS2_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pcreate_class cid1: " + err);
+ }
+ assertTrue("H5Pcreate_class cid1", cid1 >= 0);
+
+ // Check class name
+ try {
+ name = H5.H5Pget_class_name(cid1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_class_name cid1: " + err);
+ }
+ assertTrue("Class names don't match!, "+name+"="+CLASS2_NAME+"\n", name.compareTo(CLASS2_NAME)==0);
+
+ // Check class parent
+ try {
+ cid2 = H5.H5Pget_class_parent(cid1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_class_parent cid2: " + err);
+ }
+ assertTrue("H5Pget_class_parent cid2 ", cid2 >= 0);
+
+ // Verify class parent correct
+ try {
+ status = H5.H5Pequal(cid2, HDF5Constants.H5P_FILE_CREATE);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pequal cid2: " + err);
+ }
+ assertTrue("H5Pequal cid2 ", status >= 0);
+
+ // Check class parent's parent
+ try {
+ cid3 = H5.H5Pget_class_parent(cid2);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_class_parent cid3: " + err);
+ }
+ assertTrue("H5Pget_class_parent cid3", cid3 >= 0);
+
+ // Verify class parent's parent correct
+ try {
+ status = H5.H5Pequal(cid3, HDF5Constants.H5P_GROUP_CREATE);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pequal cid3: " + err);
+ }
+ assertTrue("H5Pequal cid3 ", status >= 0);
+
+ // Close parent class's parent
+ try {
+ H5.H5Pclose_class(cid3);
+ cid3 = -1;
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pclose_class cid3: " + err);
+ }
+
+ // Close parent class
+ try {
+ H5.H5Pclose_class(cid2);
+ cid2 = -1;
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pclose_class cid2: " + err);
+ }
+
+ // Close class
+ try {
+ H5.H5Pclose_class(cid1);
+ cid1 = -1;
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pclose_class cid1: " + err);
+ }
+ }
+ finally {
+ if (cid3 > 0)
+ try {H5.H5Pclose_class(cid3);} catch (Throwable err) {}
+ if (cid2 > 0)
+ try {H5.H5Pclose_class(cid2);} catch (Throwable err) {}
+ if (cid1 > 0)
+ try {H5.H5Pclose_class(cid1);} catch (Throwable err) {}
+ }
+ }
+
+ // Test basic generic property list code. Tests adding properties to generic classes.
+ @Test
+ public void testH5P_genprop_basic_class_prop() {
+ boolean status = false;
+ long size = -1; // Generic Property size
+ long nprops = -1; // Generic Property class number
+
+ // Check the number of properties in class
+ try {
+ nprops = H5.H5Pget_nprops(plist_class_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_nprops plist_class_id: " + err);
+ }
+ assertTrue("H5Pget_nprops: "+nprops, nprops==0);
+
+ // Check the existence of the first property (should fail)
+ try {
+ status = H5.H5Pexist(plist_class_id, PROP1_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pexist plist_class_id: " + err);
+ }
+ assertFalse("H5Pexist plist_class_id "+PROP1_NAME, status);
+
+ // Insert first property into class (with no callbacks)
+ try {
+ byte[] prop_value = HDFNativeData.intToByte(prop1_def);
+
+ H5.H5Pregister2_nocb(plist_class_id, PROP1_NAME, PROP1_SIZE, prop_value);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pregister2 plist_class_id: "+PROP1_NAME + err);
+ }
+
+ // Try to insert the first property again (should fail)
+ try {
+ byte[] prop_value = HDFNativeData.intToByte(prop1_def);
+
+ H5.H5Pregister2_nocb(plist_class_id, PROP1_NAME, PROP1_SIZE, prop_value);
+ fail("H5Pregister2 plist_class_id: "+PROP1_NAME);
+ }
+ catch (Throwable err) {
+ }
+
+ // Check the existence of the first property
+ try {
+ status = H5.H5Pexist(plist_class_id, PROP1_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pexist plist_class_id: " + err);
+ }
+ assertTrue("H5Pexist plist_class_id "+PROP1_NAME, status);
+
+ // Check the size of the first property
+ try {
+ size = H5.H5Pget_size(plist_class_id, PROP1_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_size PROP1_NAME: " + err);
+ }
+ assertTrue("H5Pget_size "+PROP1_NAME +" size: "+size, size == PROP1_SIZE);
+
+ // Check the number of properties in class
+ try {
+ nprops = H5.H5Pget_nprops(plist_class_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_nprops plist_class_id: " + err);
+ }
+ assertTrue("H5Pget_nprops: "+nprops, nprops==1);
+
+ // Insert second property into class (with no callbacks)
+ try {
+ byte[] prop_value = HDFNativeData.floatToByte(prop2_def);
+
+ H5.H5Pregister2_nocb(plist_class_id, PROP2_NAME, PROP2_SIZE, prop_value);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pregister2 plist_class_id: "+PROP2_NAME + err);
+ }
+
+ // Try to insert the second property again (should fail)
+ try {
+ byte[] prop_value = HDFNativeData.floatToByte(prop2_def);
+
+ H5.H5Pregister2_nocb(plist_class_id, PROP2_NAME, PROP2_SIZE, prop_value);
+ fail("H5Pregister2 plist_class_id: "+PROP2_NAME);
+ }
+ catch (Throwable err) {
+ }
+
+ // Check the existence of the second property
+ try {
+ status = H5.H5Pexist(plist_class_id, PROP2_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pexist plist_class_id: " + err);
+ }
+ assertTrue("H5Pexist plist_class_id "+PROP2_NAME, status);
+
+ // Check the size of the second property
+ try {
+ size = H5.H5Pget_size(plist_class_id, PROP2_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_size PROP2_NAME: " + err);
+ }
+ assertTrue("H5Pget_size "+PROP2_NAME +" size: "+size, size == PROP2_SIZE);
+
+ // Check the number of properties in class
+ try {
+ nprops = H5.H5Pget_nprops(plist_class_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_nprops plist_class_id: " + err);
+ }
+ assertTrue("H5Pget_nprops: "+nprops, nprops==2);
+
+ // Insert third property into class (with no callbacks)
+ try {
+ byte[] prop_value = new String(prop3_def).getBytes(StandardCharsets.UTF_8);
+
+ H5.H5Pregister2_nocb(plist_class_id, PROP3_NAME, PROP3_SIZE, prop_value);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pregister2 plist_class_id: "+PROP3_NAME + err);
+ }
+
+ // Check the existence of the third property
+ try {
+ status = H5.H5Pexist(plist_class_id, PROP3_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pexist plist_class_id: " + err);
+ }
+ assertTrue("H5Pexist plist_class_id "+PROP3_NAME, status);
+
+ // Check the size of the third property
+ try {
+ size = H5.H5Pget_size(plist_class_id, PROP3_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_size PROP3_NAME: " + err);
+ }
+ assertTrue("H5Pget_size "+PROP3_NAME +" size: "+size, size == PROP3_SIZE);
+
+ // Check the number of properties in class
+ try {
+ nprops = H5.H5Pget_nprops(plist_class_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_nprops plist_class_id: " + err);
+ }
+ assertTrue("H5Pget_nprops: "+nprops, nprops==3);
+
+ // Unregister first property
+ try {
+ H5.H5Punregister(plist_class_id, PROP1_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Punregister plist_class_id: "+PROP1_NAME + err);
+ }
+
+ // Try to check the size of the first property (should fail)
+ try {
+ size = H5.H5Pget_size(plist_class_id, PROP1_NAME);
+ fail("H5Pget_size PROP1_NAME");
+ }
+ catch (Throwable err) {
+ }
+
+ // Check the number of properties in class
+ try {
+ nprops = H5.H5Pget_nprops(plist_class_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_nprops plist_class_id: " + err);
+ }
+ assertTrue("H5Pget_nprops: "+nprops, nprops==2);
+
+ // Unregister second property
+ try {
+ H5.H5Punregister(plist_class_id, PROP2_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Punregister plist_class_id: "+PROP2_NAME + err);
+ }
+
+ // Check the number of properties in class
+ try {
+ nprops = H5.H5Pget_nprops(plist_class_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_nprops plist_class_id: " + err);
+ }
+ assertTrue("H5Pget_nprops: "+nprops, nprops==1);
+
+ // Unregister third property
+ try {
+ H5.H5Punregister(plist_class_id, PROP3_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Punregister plist_class_id: "+PROP3_NAME + err);
+ }
+
+ // Check the number of properties in class
+ try {
+ nprops = H5.H5Pget_nprops(plist_class_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_nprops plist_class_id: " + err);
+ }
+ assertTrue("H5Pget_nprops: "+nprops, nprops==0);
+ }
+
+ // Test basic generic property list code. Tests iterating over properties in a generic class.
+ @Test
+ public void testH5P_genprop_class_iter() {
+ class idata {
+ public String[] iter_names= null;
+ public int iter_count = -1;
+ idata(String[] names, int count) {
+ this.iter_names = names;
+ this.iter_count = count;
+ }
+ }
+ class H5P_iter_data implements H5P_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+ H5P_iterate_t iter_data = new H5P_iter_data();
+
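+ // Iteration callback: compares each visited property name with the next expected name from pnames;
+ // H5Piterate stops early if the callback returns a non-zero value.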
+ class H5P_iter_callback implements H5P_iterate_cb {
+ public int callback(long list_id, String name, H5P_iterate_t op_data) {
+ idata id = ((H5P_iter_data)op_data).iterdata.get(0);
+ return name.compareTo(id.iter_names[id.iter_count++]);
+ }
+ }
+ H5P_iterate_cb iter_cb = new H5P_iter_callback();
+
+ long size = -1; // Generic Property size
+ long nprops = -1; // Generic Property class number
+ int[] idx = {0}; // Index to start iteration at
+
+ // Insert first property into class (with no callbacks)
+ try {
+ byte[] prop_value = HDFNativeData.intToByte(prop1_def);
+
+ H5.H5Pregister2_nocb(plist_class_id, PROP1_NAME, PROP1_SIZE, prop_value);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pregister2 plist_class_id: "+PROP1_NAME + err);
+ }
+
+ // Insert second property into class (with no callbacks)
+ try {
+ byte[] prop_value = HDFNativeData.floatToByte(prop2_def);
+
+ H5.H5Pregister2_nocb(plist_class_id, PROP2_NAME, PROP2_SIZE, prop_value);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pregister2 plist_class_id: "+PROP2_NAME + err);
+ }
+
+ // Insert third property into class (with no callbacks)
+ try {
+ byte[] prop_value = new String(prop3_def).getBytes(StandardCharsets.UTF_8);
+
+ H5.H5Pregister2_nocb(plist_class_id, PROP3_NAME, PROP3_SIZE, prop_value);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pregister2 plist_class_id: "+PROP3_NAME + err);
+ }
+
+ // Insert fourth property into class (with no callbacks)
+ try {
+ byte[] prop_value = HDFNativeData.doubleToByte(prop4_def);
+
+ H5.H5Pregister2_nocb(plist_class_id, PROP4_NAME, PROP4_SIZE, prop_value);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pregister2 plist_class_id: "+PROP4_NAME + err);
+ }
+
+ // Check the number of properties in class
+ try {
+ nprops = H5.H5Pget_nprops(plist_class_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_nprops plist_class_id: " + err);
+ }
+ assertTrue("H5Pget_nprops: "+nprops, nprops==4);
+
+ // Iterate over all properties in class
+ idata id = new idata(pnames, 0);
+ ((H5P_iter_data)iter_data).iterdata.add(id);
+ try {
+ H5.H5Piterate(plist_class_id, null, iter_cb, iter_data);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Piterate: " + err);
+ }
+ assertFalse("H5Piterate ",((H5P_iter_data)iter_data).iterdata.isEmpty());
+ assertTrue("H5Piterate "+((H5P_iter_data)iter_data).iterdata.size(),((H5P_iter_data)iter_data).iterdata.size()==1);
+ assertTrue("H5Piterate "+(((H5P_iter_data)iter_data).iterdata.get(0)).iter_count,((idata)((H5P_iter_data)iter_data).iterdata.get(0)).iter_count==4);
+
+ // Iterate over last three properties in class
+ idx[0] = 1;
+ ((H5P_iter_data)iter_data).iterdata.get(0).iter_count = 1;
+ try {
+ H5.H5Piterate(plist_class_id, idx, iter_cb, iter_data);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Piterate: " + err);
+ }
+ assertFalse("H5Piterate ",((H5P_iter_data)iter_data).iterdata.isEmpty());
+ assertTrue("H5Piterate "+((H5P_iter_data)iter_data).iterdata.size(),((H5P_iter_data)iter_data).iterdata.size()==1);
+ assertTrue("H5Piterate "+(((H5P_iter_data)iter_data).iterdata.get(0)).iter_count,((idata)((H5P_iter_data)iter_data).iterdata.get(0)).iter_count==4);
+
+ assertTrue("H5Piterate: "+nprops+"="+idx[0], nprops == idx[0]);
+ }
+
+ // Test basic generic property list code.
+ // Tests creating new generic property lists and adding and
+ // removing properties from them.
+ @Test
+ public void testH5P_genprop_basic_list_prop() {
+ boolean status = false;
+ long lid1 = -1; // Generic Property list ID
+ long nprops = -1; // Number of properties in class
+
+ try {
+ // Add several properties (several w/default values)
+
+ // Insert first property into class (with no callbacks)
+ try {
+ byte[] prop_value = HDFNativeData.intToByte(prop1_def);
+
+ H5.H5Pregister2_nocb(plist_class_id, PROP1_NAME, PROP1_SIZE, prop_value);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pregister2 plist_class_id: "+PROP1_NAME + err);
+ }
+
+ // Insert second property into class (with no callbacks)
+ try {
+ byte[] prop_value = HDFNativeData.floatToByte(prop2_def);
+
+ H5.H5Pregister2_nocb(plist_class_id, PROP2_NAME, PROP2_SIZE, prop_value);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pregister2 plist_class_id: "+PROP2_NAME + err);
+ }
+
+ // Create a property list from the class
+ try {
+ lid1 = H5.H5Pcreate(plist_class_id);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pcreate lid1: " + err);
+ }
+
+ // Check the number of properties in class
+ try {
+ nprops = H5.H5Pget_nprops(lid1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_nprops lid1: " + err);
+ }
+ assertTrue("H5Pget_nprops: "+nprops, nprops==2);
+
+ // Add temporary properties
+
+ // Insert first temporary property into list (with no callbacks)
+ try {
+ byte[] prop_value = new String(prop3_def).getBytes(StandardCharsets.UTF_8);
+
+ H5.H5Pinsert2_nocb(lid1, PROP3_NAME, PROP3_SIZE, prop_value);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pinsertr2 lid1: "+PROP3_NAME + err);
+ }
+
+ // Insert second temporary property into list (with no callbacks)
+ try {
+ byte[] prop_value = HDFNativeData.doubleToByte(prop4_def);
+
+ H5.H5Pinsert2_nocb(lid1, PROP4_NAME, PROP4_SIZE, prop_value);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pinsert2 lid1: "+PROP4_NAME + err);
+ }
+
+ // Check the number of properties in class
+ try {
+ nprops = H5.H5Pget_nprops(lid1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pget_nprops lid1: " + err);
+ }
+ assertTrue("H5Pget_nprops: "+nprops, nprops==4);
+
+ // Check existence of all properties
+ try {
+ status = H5.H5Pexist(lid1, PROP1_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pexist plist_class_id: " + err);
+ }
+ assertTrue("H5Pexist lid1 "+PROP1_NAME, status);
+ try {
+ status = H5.H5Pexist(lid1, PROP2_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pexist plist_class_id: " + err);
+ }
+ assertTrue("H5Pexist lid1 "+PROP2_NAME, status);
+ try {
+ status = H5.H5Pexist(lid1, PROP3_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pexist plist_class_id: " + err);
+ }
+ assertTrue("H5Pexist lid1 "+PROP3_NAME, status);
+ try {
+ status = H5.H5Pexist(lid1, PROP4_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pexist plist_class_id: " + err);
+ }
+ assertTrue("H5Pexist lid1 "+PROP4_NAME, status);
+
+ }
+ finally {
+ if (lid1 > 0)
+ try {H5.H5Pclose(lid1);} catch (Throwable err) {}
+ }
+ }
+
+// // Test basic generic property list code. Tests callbacks for property lists in a generic class.
+// @Test
+// public void testH5P_genprop_class_callback() {
+// class cdata {
+// public long cls_id = -1;
+// public int cls_count = -1;
+// cdata(long id, int count) {
+// this.cls_id = id;
+// this.cls_count = count;
+// }
+// }
+// class H5P_cls_create_data implements H5P_cls_create_func_t {
+// public ArrayList<cdata> clsdata = new ArrayList<cdata>();
+// }
+// H5P_cls_create_func_t cls_create_data = new H5P_cls_create_data();
+//
+// class H5P_cls_create_callback implements H5P_cls_create_func_cb {
+// public int callback(long list_id, H5P_cls_create_func_t cls_data) {
+// System.err.println("H5P_cls_create_callback enter");
+// cdata cd = ((H5P_cls_create_data)cls_create_data).clsdata.get(0);
+// cd.cls_count++;
+// cd.cls_id = list_id;
+// return 0;
+// }
+// }
+// H5P_cls_create_func_cb cls_create_cb = new H5P_cls_create_callback();
+//
+// class H5P_cls_copy_data implements H5P_cls_copy_func_t {
+// public ArrayList<cdata> clsdata = new ArrayList<cdata>();
+// }
+// H5P_cls_copy_func_t cls_copy_data = new H5P_cls_copy_data();
+//
+// class H5P_cls_copy_callback implements H5P_cls_copy_func_cb {
+// public int callback(long list_id1, long list_id2, H5P_cls_copy_func_t cls_data) {
+// cdata cd = ((H5P_cls_copy_data)cls_copy_data).clsdata.get(0);
+// cd.cls_count++;
+// cd.cls_id = list_id1;
+// return 0;
+// }
+// }
+// H5P_cls_copy_func_cb cls_copy_cb = new H5P_cls_copy_callback();
+//
+// class H5P_cls_close_data implements H5P_cls_close_func_t {
+// public ArrayList<cdata> clsdata = new ArrayList<cdata>();
+// }
+// H5P_cls_close_func_t cls_close_data = new H5P_cls_close_data();
+//
+// class H5P_cls_close_callback implements H5P_cls_close_func_cb {
+// public int callback(long list_id, H5P_cls_close_func_t cls_data) {
+// cdata cd = ((H5P_cls_close_data)cls_close_data).clsdata.get(0);
+// cd.cls_count++;
+// cd.cls_id = list_id;
+// return 0;
+// }
+// }
+// H5P_cls_close_func_cb cls_close_cb = new H5P_cls_close_callback();
+//
+// long cid1 = -1; // Generic Property class ID
+// long cid2 = -1; // Generic Property class ID
+// long lid1 = -1; // Generic Property list ID
+// long lid2 = -1; // Generic Property list ID
+// long lid3 = -1; // Generic Property list ID
+// long nprops = -1; // Number of properties in class
+//
+// try {
+// // Create a new generic class, derived from the root of the class hierarchy
+// try {
+// cid1 = H5.H5Pcreate_class(HDF5Constants.H5P_ROOT, CLASS1_NAME, cls_create_cb, cls_create_data, cls_copy_cb, cls_copy_data, cls_close_cb, cls_close_data);
+// }
+// catch (Throwable err) {
+// err.printStackTrace();
+// fail("H5Pcreate_class cid1: " + err);
+// }
+// assertTrue("H5Pcreate_class cid1", cid1 >= 0);
+//
+// // Insert first property into class (with no callbacks)
+// try {
+// byte[] prop_value = HDFNativeData.intToByte(prop1_def);
+//
+// H5.H5Pregister2(cid1, PROP1_NAME, PROP1_SIZE, prop_value, null, null, null, null, null, null, null);
+// }
+// catch (Throwable err) {
+// err.printStackTrace();
+// fail("H5Pregister2 cid1: "+PROP1_NAME + err);
+// }
+//
+// // Insert second property into class (with no callbacks)
+// try {
+// byte[] prop_value = HDFNativeData.floatToByte(prop2_def);
+//
+// H5.H5Pregister2(cid1, PROP2_NAME, PROP2_SIZE, prop_value, null, null, null, null, null, null, null);
+// }
+// catch (Throwable err) {
+// err.printStackTrace();
+// fail("H5Pregister2 cid1: "+PROP2_NAME + err);
+// }
+//
+// // Insert third property into class (with no callbacks)
+// try {
+// byte[] prop_value = new String(prop3_def).getBytes(StandardCharsets.UTF_8);
+//
+// H5.H5Pregister2(cid1, PROP3_NAME, PROP3_SIZE, prop_value, null, null, null, null, null, null, null);
+// }
+// catch (Throwable err) {
+// err.printStackTrace();
+// fail("H5Pregister2 cid1: "+PROP3_NAME + err);
+// }
+//
+// // Check the number of properties in class
+// try {
+// nprops = H5.H5Pget_nprops(cid1);
+// }
+// catch (Throwable err) {
+// err.printStackTrace();
+// fail("H5Pget_nprops cid1: " + err);
+// }
+// assertTrue("H5Pget_nprops: "+nprops, nprops==3);
+//
+// // Initialize class callback structs
+// cdata create_id = new cdata(-1, 0);
+// cdata copy_id = new cdata(-1, 0);
+// cdata close_id = new cdata(-1, 0);
+// ((H5P_cls_create_data)cls_create_data).clsdata.add(create_id);
+// ((H5P_cls_copy_data)cls_copy_data).clsdata.add(copy_id);
+// ((H5P_cls_close_data)cls_close_data).clsdata.add(close_id);
+//
+// // Create a property list from the class
+// try {
+// lid1 = H5.H5Pcreate(cid1);
+// }
+// catch (Throwable err) {
+// err.printStackTrace();
+// fail("H5Pcreate lid1: " + err);
+// }
+//
+// // Verify that the creation callback occurred
+// assertFalse("H5Pcreate ",((H5P_cls_create_data)cls_create_data).clsdata.isEmpty());
+// assertTrue("H5Pcreate "+((H5P_cls_create_data)cls_create_data).clsdata.get(0).cls_id ,((H5P_cls_create_data)cls_create_data).clsdata.get(0).cls_id == lid1);
+// assertTrue("H5Pcreate "+(((H5P_cls_create_data)cls_create_data).clsdata.get(0)).cls_count,((cdata)((H5P_cls_create_data)cls_create_data).clsdata.get(0)).cls_count==1);
+//
+// // Check the number of properties in list
+// try {
+// nprops = H5.H5Pget_nprops(lid1);
+// }
+// catch (Throwable err) {
+// err.printStackTrace();
+// fail("H5Pget_nprops lid1: " + err);
+// }
+// assertTrue("H5Pget_nprops: "+nprops, nprops==3);
+//
+// // Create another property list from the class
+// try {
+// lid2 = H5.H5Pcreate(cid1);
+// }
+// catch (Throwable err) {
+// err.printStackTrace();
+// fail("H5Pcreate lid2: " + err);
+// }
+//
+// /* Verify that the creation callback occurred */
+// assertFalse("H5Pcreate ",((H5P_cls_create_data)cls_create_data).clsdata.isEmpty());
+// assertTrue("H5Pcreate "+((H5P_cls_create_data)cls_create_data).clsdata.get(0).cls_id ,((H5P_cls_create_data)cls_create_data).clsdata.get(0).cls_id == lid2);
+// assertTrue("H5Pcreate "+(((H5P_cls_create_data)cls_create_data).clsdata.get(0)).cls_count,((cdata)((H5P_cls_create_data)cls_create_data).clsdata.get(0)).cls_count==2);
+//
+// // Check the number of properties in list
+// try {
+// nprops = H5.H5Pget_nprops(lid2);
+// }
+// catch (Throwable err) {
+// err.printStackTrace();
+// fail("H5Pget_nprops lid2: " + err);
+// }
+// assertTrue("H5Pget_nprops: "+nprops, nprops==3);
+//
+// // Create another property list by copying an existing list
+// try {
+// lid3= H5.H5Pcopy(lid1);
+// }
+// catch (Throwable err) {
+// err.printStackTrace();
+// fail("H5Pcopy lid3: " + err);
+// }
+//
+// // Verify that the copy callback occurred
+// assertFalse("H5Pcopy ",((H5P_cls_copy_data)cls_copy_data).clsdata.isEmpty());
+// assertTrue("H5Pcopy "+((H5P_cls_copy_data)cls_copy_data).clsdata.get(0).cls_id ,((H5P_cls_copy_data)cls_copy_data).clsdata.get(0).cls_id == lid3);
+// assertTrue("H5Pcopy "+(((H5P_cls_copy_data)cls_copy_data).clsdata.get(0)).cls_count,((cdata)((H5P_cls_copy_data)cls_copy_data).clsdata.get(0)).cls_count==1);
+//
+// // Check the number of properties in list
+// try {
+// nprops = H5.H5Pget_nprops(lid3);
+// }
+// catch (Throwable err) {
+// err.printStackTrace();
+// fail("H5Pget_nprops lid3: " + err);
+// }
+// assertTrue("H5Pget_nprops: "+nprops, nprops==3);
+//
+// // Close first list
+// try {
+// H5.H5Pclose(lid1);
+// }
+// catch (Throwable err) {
+// err.printStackTrace();
+// fail("H5Pclose lid1: " + err);
+// }
+//
+// /* Verify that the close callback occurred */
+// assertFalse("H5Pclose ",((H5P_cls_close_data)cls_close_data).clsdata.isEmpty());
+// assertTrue("H5Pclose "+((H5P_cls_close_data)cls_close_data).clsdata.get(0).cls_id ,((H5P_cls_close_data)cls_copy_data).clsdata.get(0).cls_id == lid1);
+// assertTrue("H5Pclose "+(((H5P_cls_close_data)cls_close_data).clsdata.get(0)).cls_count,((cdata)((H5P_cls_close_data)cls_copy_data).clsdata.get(0)).cls_count==1);
+//
+// // Close second list
+// try {
+// H5.H5Pclose(lid2);
+// }
+// catch (Throwable err) {
+// err.printStackTrace();
+// fail("H5Pclose lid2: " + err);
+// }
+//
+// // Verify that the close callback occurred
+// assertTrue("H5Pclose "+((H5P_cls_close_data)cls_close_data).clsdata.get(0).cls_id ,((H5P_cls_close_data)cls_close_data).clsdata.get(0).cls_id == lid2);
+// assertTrue("H5Pclose "+(((H5P_cls_close_data)cls_close_data).clsdata.get(0)).cls_count,((cdata)((H5P_cls_close_data)cls_close_data).clsdata.get(0)).cls_count==2);
+//
+// // Close third list
+// try {
+// H5.H5Pclose(lid3);
+// }
+// catch (Throwable err) {
+// err.printStackTrace();
+// fail("H5Pclose lid3: " + err);
+// }
+//
+// // Verify that the close callback occurred
+// assertTrue("H5Pclose "+((H5P_cls_close_data)cls_close_data).clsdata.get(0).cls_id ,((H5P_cls_close_data)cls_close_data).clsdata.get(0).cls_id == lid3);
+// assertTrue("H5Pclose "+(((H5P_cls_close_data)cls_close_data).clsdata.get(0)).cls_count,((cdata)((H5P_cls_close_data)cls_close_data).clsdata.get(0)).cls_count==3);
+// }
+// finally {
+// if (lid3 > 0)
+// try {H5.H5Pclose(lid3);} catch (Throwable err) {}
+// if (lid2 > 0)
+// try {H5.H5Pclose(lid2);} catch (Throwable err) {}
+// if (lid1 > 0)
+// try {H5.H5Pclose(lid1);} catch (Throwable err) {}
+// if (cid2 > 0)
+// try {H5.H5Pclose_class(cid2);} catch (Throwable err) {}
+// if (cid1 > 0)
+// try {H5.H5Pclose_class(cid1);} catch (Throwable err) {}
+// }
+// }
+
+}
diff --git a/java/test/TestH5Pvirtual.java b/java/test/TestH5Pvirtual.java
new file mode 100644
index 0000000..9372ae1
--- /dev/null
+++ b/java/test/TestH5Pvirtual.java
@@ -0,0 +1,433 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Pvirtual {
+ @Rule public TestName testname = new TestName();
+
+ private static final String H5_FILE = "vds.h5";
+ private static final String SRC_FILE[] = {
+ "v-0.h5",
+ "v-1.h5",
+ "v-2.h5"
+ };
+ private static final String SRC_DATASET[] = {
+ "A",
+ "B",
+ "C"
+ };
+ private static final int DIM_Y = 6;
+ private static final int VDSDIM_X = 4;
+ private static final int VDSDIM_Y = 6;
+ private static final int fill_value = -1;
+ long[] H5dims = { DIM_Y };
+ long[] VDSH5dims = { VDSDIM_X, VDSDIM_Y };
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5dssid = -1;
+ long H5dvsid = -1;
+ long H5did = -1;
+ long H5dcplid = -1;
+ long H5dapl_id = -1;
+
+ private final void _deleteFile(String filename) {
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
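+    // Helper that builds the virtual dataset for these tests: each of the three 1-D
+    // source datasets (A, B, C) is mapped onto one row of the 4x6 virtual dataset by
+    // selecting that row in the virtual dataspace (dsid) and registering the mapping
+    // on the dataset creation property list with H5Pset_virtual.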
+ private final long _createDataset(long fid, long dsid, String name, long dcpl, long dapl) {
+ long did = -1;
+ long[] start = {0, 0};
+ long[] stride = null;
+ long[] count = {1, 1};
+ long[] block = {1, VDSDIM_Y};
+
+ try {
+ H5dssid = H5.H5Screate_simple(1, H5dims, null);
+ for (int i = 0; i < 3; i++) {
+ start[0] = i;
+ /* Select i-th row in the virtual dataset; selection in the source datasets is the same. */
+ H5.H5Sselect_hyperslab(dsid, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+ H5.H5Pset_virtual(dcpl, dsid, SRC_FILE[i], SRC_DATASET[i], H5dssid);
+ }
+ did = H5.H5Dcreate(fid, name, HDF5Constants.H5T_NATIVE_INT, dsid,
+ HDF5Constants.H5P_DEFAULT, dcpl, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5Pvirtual._createDataset: ", did > 0);
+
+ return did;
+ }
+
+ private final void _createH5File(long fcpl, long fapl) {
+ int[] dset_data = new int[DIM_Y];
+ // Create source files and datasets
+ for (int i=0; i < 3; i++) {
+ long space_id = -1;
+ long dset_id = -1;
+ long file_id = -1;
+ for (int j = 0; j < DIM_Y; j++) dset_data[j] = i+1;
+
+ try {
+ file_id = H5.H5Fcreate(SRC_FILE[i], HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ space_id = H5.H5Screate_simple(1, H5dims, null);
+ dset_id = H5.H5Dcreate(file_id, SRC_DATASET[i], HDF5Constants.H5T_NATIVE_INT, space_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Dwrite (dset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT,
+ dset_data);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pvirtual.createH5file: " + err);
+ }
+ finally {
+ if (dset_id > 0)
+ try {H5.H5Dclose(dset_id);} catch (Exception ex) {}
+ if (space_id > 0)
+ try {H5.H5Sclose(space_id);} catch (Exception ex) {}
+ if (file_id > 0)
+ try {H5.H5Fclose(file_id);} catch (Exception ex) {}
+ }
+ }
+
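+        // Create the VDS container file itself: a 4x6 integer dataspace and a dataset
+        // creation property list with a fill value of -1 for unmapped elements; the
+        // virtual mappings themselves are added later in _createDataset.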
+ try {
+ int[] fill_value = {-1};
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5dsid = H5.H5Screate_simple(2, VDSH5dims, null);
+ H5dcplid = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ H5.H5Pset_fill_value(H5dcplid, HDF5Constants.H5T_NATIVE_INT, fill_value);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pvirtual.createH5file: " + err);
+ }
+ assertTrue("TestH5Pvirtual.createH5file: H5.H5Fcreate: ", H5fid > 0);
+ assertTrue("TestH5Pvirtual.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+ assertTrue("TestH5Pvirtual.createH5file: H5.H5Pcreate: ", H5dcplid > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ @Before
+ public void createH5file()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ _createH5File(HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5dapl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
+ assertTrue("TestH5Pvirtual.createH5file: H5.H5Pcreate: ", H5dapl_id > 0);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5dapl_id > 0)
+ try {H5.H5Pclose(H5dapl_id);} catch (Exception ex) {}
+ if (H5dcplid > 0)
+ try {H5.H5Pclose(H5dcplid);} catch (Exception ex) {}
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ for (int i = 0; i < 3; i++) {
+ _deleteFile(SRC_FILE[i]);
+ }
+ _deleteFile(H5_FILE);
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Pvirtual_storage() {
+ int layout = -1;
+
+ H5did = _createDataset(H5fid, H5dsid, "VDS", H5dcplid, H5dapl_id);
+ try {
+ layout = H5.H5Pget_layout (H5dcplid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Pget_layout: " + err);
+ }
+ finally {
+ if (H5dssid > 0)
+ try {H5.H5Sclose(H5dssid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ }
+ assertTrue("testH5Pvirtual_storage", HDF5Constants.H5D_VIRTUAL == layout);
+ }
+
+ @Test
+ public void testH5Pget_virtual_count() {
+ long num_map = -1;
+
+ H5did = _createDataset(H5fid, H5dsid, "VDS", H5dcplid, H5dapl_id);
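+        // _createDataset registers one mapping per source file, so three mappings are
+        // expected on the creation property list.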
+ try {
+ num_map = H5.H5Pget_virtual_count(H5dcplid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Pget_virtual_count: " + err);
+ }
+ finally {
+ if (H5dssid > 0)
+ try {H5.H5Sclose(H5dssid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ }
+ assertTrue("testH5Pget_virtual_count: "+num_map, num_map >= 0);
+ }
+
+ @Test
+ public void testH5Pget_source_filename() throws Throwable {
+ String filename = null;
+
+ H5did = _createDataset(H5fid, H5dsid, "VDS", H5dcplid, H5dapl_id);
+ try {
+ filename = (H5.H5Pget_virtual_filename (H5dcplid, 2));
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Pget_virtual_filename: " + err);
+ }
+ finally {
+ if (H5dssid > 0)
+ try {H5.H5Sclose(H5dssid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ }
+ assertTrue("testH5Pget_source_filename: "+filename, filename.compareTo("v-2.h5") == 0);
+ }
+
+ @Test
+ public void testH5Pget_source_datasetname() throws Throwable {
+ String datasetname = null;
+
+ H5did = _createDataset(H5fid, H5dsid, "VDS", H5dcplid, H5dapl_id);
+ try {
+ datasetname = H5.H5Pget_virtual_dsetname (H5dcplid, 1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Pget_virtual_dsetname: " + err);
+ }
+ finally {
+ if (H5dssid > 0)
+ try {H5.H5Sclose(H5dssid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ }
+ assertTrue("testH5Pget_source_datasetname: "+datasetname, datasetname.compareTo("B") == 0);
+ }
+
+ @Test
+ public void testH5Pget_selection_source_dataset() throws Throwable {
+ long src_space = -1;
+ long src_selection = -1;
+
+ H5did = _createDataset(H5fid, H5dsid, "VDS", H5dcplid, H5dapl_id);
+ try {
+ src_space = H5.H5Pget_virtual_srcspace (H5dcplid, 0);
+ src_selection = H5.H5Sget_select_type(src_space);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Pget_selection_source_dataset: " + err);
+ }
+ finally {
+ if (src_space > 0)
+ try {H5.H5Sclose(src_space);} catch (Exception ex) {}
+ if (H5dssid > 0)
+ try {H5.H5Sclose(H5dssid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ }
+ assertTrue("testH5Pget_selection_source_dataset", src_selection == HDF5Constants.H5S_SEL_ALL);
+ }
+
+ @Test
+ public void testH5Pget_mapping_parameters() {
+ long num_map = -1;
+
+ H5did = _createDataset(H5fid, H5dsid, "VDS", H5dcplid, H5dapl_id);
+ try {
+ try {
+ num_map = H5.H5Pget_virtual_count(H5dcplid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Pget_virtual_count: " + err);
+ }
+ for (int i = 0; i < num_map; i++) {
+ int vselection = -1;
+ long vspace = -1;
+ long nblocks; // Number of hyperslab blocks
+ long blocks[] = {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}; // List of blocks
+ long[] start = {i, 0};
+ long[] stride = {1, 1};
+ long[] count = {1, 1};
+ long[] block = {1, VDSDIM_Y};
+ long q_start[] = new long[2];
+ long q_stride[] = new long[2];
+ long q_count[] = new long[2];
+ long q_block[] = new long[2];
+ boolean is_regular = false;
+
+ try {
+ try {
+ vspace = H5.H5Pget_virtual_vspace (H5dcplid, i);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Pget_virtual_vspace: " + err);
+ }
+ try {
+ vselection = H5.H5Sget_select_type(vspace);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sget_select_type: " + err);
+ }
+ assertTrue("testH5Pget_mapping_parameters["+i+"]", vselection == HDF5Constants.H5S_SEL_HYPERSLABS);
+
+ // Verify that there is only one block
+ nblocks = H5.H5Sget_select_hyper_nblocks(vspace);
+ assertTrue("H5Sget_select_hyper_nblocks", nblocks == 1);
+
+ // Retrieve the block defined
+ H5.H5Sget_select_hyper_blocklist(vspace, 0, nblocks, blocks);
+
+ // Verify that the correct block is defined
+ assertTrue("H5.H5Sget_select_hyper_blocklist["+i+"] [0]: "+blocks[0], start[0] == blocks[0]);
+ assertTrue("H5.H5Sget_select_hyper_blocklist["+i+"] [1]: "+blocks[1], start[1] == blocks[1]);
+ assertTrue("H5.H5Sget_select_hyper_blocklist["+i+"] [2]: "+blocks[2], (block[0]-1+i) == blocks[2]);
+ assertTrue("H5.H5Sget_select_hyper_blocklist["+i+"] [3]: "+blocks[3], (block[1]-1) == blocks[3]);
+ // We also can use new APIs to get start, stride, count and block
+ is_regular = H5.H5Sis_regular_hyperslab(vspace);
+ assertTrue("H5.H5Sis_regular_hyperslab", is_regular);
+ H5.H5Sget_regular_hyperslab (vspace, q_start, q_stride, q_count, q_block);
+
+ // Verify the hyperslab parameters
+ for(int u = 0; u < 2; u++) {
+ assertTrue("H5Sget_regular_hyperslab, start", start[u] == q_start[u]);
+ assertTrue("H5Sget_regular_hyperslab, stride", stride[u] == q_stride[u]);
+ assertTrue("H5Sget_regular_hyperslab, count", count[u] == q_count[u]);
+ assertTrue("H5Sget_regular_hyperslab, block", block[u] == q_block[u]);
+ }
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.testH5Pget_mapping_parameters: " + err);
+ }
+ finally {
+ if (vspace > 0)
+ try {H5.H5Sclose(vspace);} catch (Exception ex) {}
+ }
+ }
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Pget_mapping_parameters: " + err);
+ }
+ finally {
+ if (H5dssid > 0)
+ try {H5.H5Sclose(H5dssid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Pset_get_virtual_view() {
+ int ret_val = -1;
+ H5did = _createDataset(H5fid, H5dsid, "VDS", H5dcplid, H5dapl_id);
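+        // A fresh dataset access property list is expected to report the default view,
+        // H5D_VDS_LAST_AVAILABLE; the test then switches it to H5D_VDS_FIRST_MISSING
+        // and reads the setting back.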
+ try {
+ ret_val = H5.H5Pget_virtual_view(H5dapl_id);
+ assertTrue("H5Pget_virtual_view", ret_val >= 0);
+ assertEquals(HDF5Constants.H5D_VDS_LAST_AVAILABLE, ret_val);
+ H5.H5Pset_virtual_view(H5dapl_id, HDF5Constants.H5D_VDS_FIRST_MISSING);
+ ret_val = H5.H5Pget_virtual_view(H5dapl_id);
+ assertTrue("H5Pget_virtual_view", ret_val >= 0);
+ assertEquals(HDF5Constants.H5D_VDS_FIRST_MISSING, ret_val);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Pset_get_virtual_view: " + err);
+ }
+ finally {
+ if (H5dssid > 0)
+ try {H5.H5Sclose(H5dssid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ }
+ }
+
+ @Ignore
+ public void testH5Pset_get_virtual_printf_gap() {
+ long ret_val = -1;
+ H5did = _createDataset(H5fid, H5dsid, "VDS", H5dcplid, H5dapl_id);
+ try {
+ ret_val = H5.H5Pget_virtual_printf_gap(H5dapl_id);
+ assertTrue("H5Pget_virtual_printf_gap", ret_val >= 0);
+ assertEquals(0, ret_val);
+ H5.H5Pset_virtual_printf_gap(H5dapl_id, 2);
+            ret_val = H5.H5Pget_virtual_printf_gap(H5dapl_id);
+ assertTrue("H5Pget_virtual_printf_gap", ret_val >= 0);
+ assertEquals(2, ret_val);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5Pset_get_virtual_printf_gap: " + err);
+ }
+ finally {
+ if (H5dssid > 0)
+ try {H5.H5Sclose(H5dssid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ }
+ }
+}
diff --git a/java/test/TestH5R.java b/java/test/TestH5R.java
new file mode 100644
index 0000000..72e0bfb
--- /dev/null
+++ b/java/test/TestH5R.java
@@ -0,0 +1,335 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5R {
+ @Rule public TestName testname = new TestName();
+ private static final String H5_FILE = "testH5R.h5";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5did = -1;
+ long H5gid = -1;
+ long H5did2 = -1;
+ long[] H5dims = { DIM_X, DIM_Y };
+
+ private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ private final long _createDataset(long fid, long dsid, String name, long dapl) {
+ long did = -1;
+ try {
+ did = H5.H5Dcreate(fid, name,
+ HDF5Constants.H5T_STD_I32BE, dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5R._createDataset: ",did > 0);
+
+ return did;
+ }
+
+ private final long _createGroup(long fid, String name) {
+ long gid = -1;
+ try {
+ gid = H5.H5Gcreate(fid, name, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Gcreate: " + err);
+ }
+ assertTrue("TestH5R._createGroup: ",gid > 0);
+
+ return gid;
+ }
+
+ @Before
+ public void createH5file()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ try {
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ H5gid = _createGroup(H5fid, "Group1");
+ H5did2 = _createDataset(H5gid, H5dsid, "dset2", HDF5Constants.H5P_DEFAULT);
+ H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5R.createH5file: " + err);
+ }
+ assertTrue("TestH5R.createH5file: H5.H5Fcreate: ",H5fid > 0);
+ assertTrue("TestH5R.createH5file: H5.H5Screate_simple: ",H5dsid > 0);
+ assertTrue("TestH5R.createH5file: _createDataset: ",H5did > 0);
+
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ if (H5gid > 0)
+ try {H5.H5Gclose(H5gid);} catch (Exception ex) {}
+ if (H5did2 > 0)
+ try {H5.H5Dclose(H5did2);} catch (Exception ex) {}
+
+ _deleteFile(H5_FILE);
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Rget_name() {
+ long loc_id=H5fid;
+ int ref_type=HDF5Constants.H5R_OBJECT;
+ long ret_val=-1;
+ byte[] ref=null;
+ String[] name= {""};
+ String objName = "/dset";
+
+ try {
+ ref = H5.H5Rcreate(H5fid, objName, ref_type, -1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Rget_name:H5Rcreate " + err);
+ }
+
+ try {
+ ret_val = H5.H5Rget_name(loc_id, ref_type, ref, name, 16);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Rget_name: " + err);
+ }
+
+ assertTrue("testH5Rget_name: H5Rget_name", ret_val>0);
+ assertTrue("The name of the object: ", objName.equals(name[0]));
+ }
+
+ @Test
+ public void testH5Rget_obj_type2() {
+ int ref_type=HDF5Constants.H5R_OBJECT;
+ byte[] ref=null;
+
+ String objName = "/dset";
+        int obj_type = -1;
+
+ try {
+ ref = H5.H5Rcreate(H5fid, objName, ref_type, -1);
+ }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Rget_obj_type2: H5Rcreate " + err);
+        }
+
+ try {
+ obj_type = H5.H5Rget_obj_type(H5fid, HDF5Constants.H5R_OBJECT, ref);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Rget_obj_type2: " + err);
+ }
+ assertEquals(obj_type, HDF5Constants.H5O_TYPE_DATASET);
+ }
+
+ @Test
+ public void testH5Rcreate_refobj() {
+ byte[] ref = null;
+
+ try {
+ ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_OBJECT, -1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Rcreate: " + err);
+ }
+ assertNotNull(ref);
+ }
+
+ @Test
+ public void testH5Rcreate_regionrefobj() {
+ byte[] ref = null;
+ try {
+ ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_DATASET_REGION, H5dsid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Rcreate: " + err);
+ }
+ assertNotNull(ref);
+ }
+
+ @Test
+ public void testH5Rdereference() {
+ byte[] ref1 = null;
+ byte[] ref2 = null;
+ long dataset_id = -1;
+ long group_id = -1;
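+        // A dataset-region reference needs a valid dataspace id (H5dsid here), while a
+        // plain object reference passes -1 for the space argument.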
+ try {
+ //Create reference on dataset
+ ref1 = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_DATASET_REGION, H5dsid);
+ dataset_id= H5.H5Rdereference(H5fid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_DATASET_REGION, ref1);
+
+ //Create reference on group
+ ref2 = H5.H5Rcreate(H5gid, "/Group1", HDF5Constants.H5R_OBJECT, -1);
+ group_id= H5.H5Rdereference(H5gid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, ref2);
+ assertNotNull(ref1);
+ assertNotNull(ref2);
+ assertTrue(dataset_id>=0);
+ assertTrue(group_id>=0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Rdereference " + err);
+ }
+ finally {
+ try {H5.H5Dclose(dataset_id);} catch (Exception ex) {}
+ try {H5.H5Gclose(group_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Rget_region() {
+ byte[] ref = null;
+ long dsid = -1;
+ try {
+ ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_DATASET_REGION, H5dsid);
+ dsid = H5.H5Rget_region(H5fid, HDF5Constants.H5R_DATASET_REGION, ref);
+ assertNotNull(ref);
+ assertTrue(dsid>=0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Rget_region: " + err);
+ }
+ finally {
+ try {H5.H5Sclose(dsid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Rget_name_Invalidreftype() throws Throwable {
+ byte[] ref = null;
+ String[] name= {""};
+ ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_OBJECT, -1);
+ H5.H5Rget_name(H5fid, HDF5Constants.H5R_DATASET_REGION, ref, name, 16);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Rget_name_NULLreference() throws Throwable {
+ byte[] ref = null;
+ String[] name= {""};
+ H5.H5Rget_name(H5fid, HDF5Constants.H5R_OBJECT, ref, name, 16);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Rget_obj_type2_Invalidreftype() throws Throwable {
+ byte[] ref = null;
+ ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_OBJECT, -1);
+ H5.H5Rget_obj_type(H5fid, HDF5Constants.H5R_DATASET_REGION, ref);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Rcreate_InvalidObjectName() throws Throwable {
+ H5.H5Rcreate(H5fid, "/GROUPS", HDF5Constants.H5R_OBJECT, -1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Rcreate_Invalidspace_id() throws Throwable {
+ H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_DATASET_REGION, -1);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Rcreate_Invalidreftype() throws Throwable {
+ H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_BADTYPE, -1);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Rgetregion_Invalidreftype() throws Throwable {
+ byte[] ref = null;
+ ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_OBJECT, H5dsid);
+ H5.H5Rget_region(H5fid, HDF5Constants.H5R_DATASET_REGION, ref);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Rgetregion_Badreferencetype() throws Throwable {
+ byte[] ref = null;
+ ref = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_OBJECT, H5dsid);
+ H5.H5Rget_region(H5fid, HDF5Constants.H5R_OBJECT, ref);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Rgetregion_Nullreference() throws Throwable {
+ byte[] ref = null;
+ H5.H5Rget_region(H5fid, HDF5Constants.H5R_DATASET_REGION, ref);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Rdereference_Nullreference() throws Throwable {
+ byte[] ref = null;
+ H5.H5Rdereference(H5did2, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, ref);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Rdereference_Invalidreference() throws Throwable {
+ byte[] ref1 = null;
+ byte[] ref2 = null;
+ ref1 = H5.H5Rcreate(H5fid, "/dset", HDF5Constants.H5R_DATASET_REGION, H5dsid);
+ ref2 = H5.H5Rcreate(H5gid, "/Group1", HDF5Constants.H5R_OBJECT, -1);
+ H5.H5Rdereference(H5gid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, ref1);
+ }
+
+}
diff --git a/java/test/TestH5S.java b/java/test/TestH5S.java
new file mode 100644
index 0000000..909ab02
--- /dev/null
+++ b/java/test/TestH5S.java
@@ -0,0 +1,590 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5S {
+ @Rule public TestName testname = new TestName();
+ long H5sid = -1;
+ int H5rank = 2;
+ long H5dims[] = {5, 5};
+ long H5maxdims[] = {10, 10};
+
+ @Before
+ public void createH5file()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0", H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ H5sid = H5.H5Screate_simple(H5rank, H5dims, H5maxdims);
+ assertTrue("H5.H5Screate_simple_extent", H5sid > 0);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5sid > 0) {
+ try {H5.H5Sclose(H5sid);} catch (Exception ex) {}
+ }
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Sget_simple_extent_ndims() {
+ int read_rank = -1;
+ try {
+ read_rank = H5.H5Sget_simple_extent_ndims(H5sid);
+ assertTrue("H5.H5Sget_simple_extent_ndims", H5rank == read_rank);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sget_simple_extent_ndims: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Sget_simple_extent_dims_null() {
+ int read_rank = -1;
+
+ try {
+ read_rank = H5.H5Sget_simple_extent_dims(H5sid, null, null);
+ assertTrue("H5.H5Sget_simple_extent_dims", H5rank == read_rank);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sget_simple_extent_dims: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Sget_simple_extent_dims() {
+ int read_rank = -1;
+ long dims[] = {5, 5};
+ long maxdims[] = {10, 10};
+
+ try {
+ read_rank = H5.H5Sget_simple_extent_dims(H5sid, dims, maxdims);
+ assertTrue("H5.H5Sget_simple_extent_dims", H5rank == read_rank);
+ assertTrue("H5.H5Sget_simple_extent_dims:dims", H5dims[0] == dims[0]);
+ assertTrue("H5.H5Sget_simple_extent_dims:maxdims", H5maxdims[0] == maxdims[0]);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sget_simple_extent_dims: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Sget_simple_extent_npoints() {
+ long num_elements = -1;
+ try {
+ num_elements = H5.H5Sget_simple_extent_npoints(H5sid);
+ assertTrue("H5.H5Sget_simple_extent_npoints", (H5dims[0]*H5dims[1]) == num_elements);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sget_simple_extent_npoints: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Sget_simple_extent_type() {
+ int read_type = -1;
+ try {
+ read_type = H5.H5Sget_simple_extent_type(H5sid);
+ assertTrue("H5.H5Sget_simple_extent_type", HDF5Constants.H5S_SIMPLE == read_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sget_simple_extent_type: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Sis_simple() {
+ boolean result = false;
+
+ try {
+ result = H5.H5Sis_simple(H5sid);
+ assertTrue("H5.H5Sis_simple", result);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sis_simple: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Sset_extent_simple() {
+ long num_elements = -1;
+ try {
+ H5.H5Sset_extent_simple(H5sid, H5rank, H5maxdims, H5maxdims);
+ num_elements = H5.H5Sget_simple_extent_npoints(H5sid);
+ assertTrue("H5.H5Sget_simple_extent_npoints", (H5maxdims[0]*H5maxdims[1]) == num_elements);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sset_extent_simple: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Sget_select_type() {
+ int read_type = -1;
+ try {
+ read_type = H5.H5Sget_select_type(H5sid);
+ assertTrue("H5.H5Sget_select_type", HDF5Constants.H5S_SEL_ALL == read_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sset_extent_none: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Sset_extent_none() {
+ int read_type = -1;
+ try {
+ H5.H5Sset_extent_none(H5sid);
+ read_type = H5.H5Sget_simple_extent_type(H5sid);
+ assertTrue("H5.H5Sget_simple_extent_type: "+read_type, HDF5Constants.H5S_NO_CLASS == read_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sset_extent_none: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Scopy() {
+ long sid = -1;
+ int read_rank = -1;
+
+ try {
+ sid = H5.H5Scopy(H5sid);
+ assertTrue("H5.H5Sis_simple", sid > 0);
+ read_rank = H5.H5Sget_simple_extent_ndims(sid);
+ assertTrue("H5.H5Screate_simple_extent_ndims", H5rank == read_rank);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Scopy: " + err);
+ }
+ finally {
+ try {H5.H5Sclose(sid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Sextent_copy() {
+ long sid = -1;
+ int class_type = -1;
+
+ try {
+ sid = H5.H5Screate(HDF5Constants.H5S_NULL);
+ assertTrue("H5.H5Screate_null", sid > 0);
+ H5.H5Sextent_copy(sid, H5sid);
+ class_type = H5.H5Sget_simple_extent_type(sid);
+ assertTrue("H5.H5Screate_null: type", class_type == HDF5Constants.H5S_SIMPLE);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sextent_copy: " + err);
+ }
+ finally {
+ try {H5.H5Sclose(sid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Sextent_equal() {
+ long sid = -1;
+ boolean result = false;
+
+ try {
+ sid = H5.H5Screate(HDF5Constants.H5S_NULL);
+ assertTrue("H5.H5Screate_null",sid > 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Screate: null " + err);
+ }
+
+ try {
+ result = H5.H5Sextent_equal(sid, H5sid);
+ assertFalse("H5.testH5Sextent_equal",result);
+ H5.H5Sextent_copy(sid, H5sid);
+ result = H5.H5Sextent_equal(sid, H5sid);
+ assertTrue("H5.testH5Sextent_equal", result);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sextent_copy " + err);
+ }
+ finally {
+ try {H5.H5Sclose(sid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Sencode_decode_null_dataspace() {
+ long sid = -1;
+ long decoded_sid = -1;
+ byte[] null_sbuf = null;
+ boolean result = false;
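+        // Round-trip check: encode the NULL dataspace into a byte buffer, decode it
+        // back, and verify that the decoded extent matches the original.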
+
+ try {
+ sid = H5.H5Screate(HDF5Constants.H5S_NULL);
+ assertTrue("H5.H5Screate_null", sid > 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Screate: null " + err);
+ }
+
+ try {
+ null_sbuf = H5.H5Sencode(sid);
+ assertFalse("H5.testH5Sencode", null_sbuf==null);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sencode " + err);
+ }
+ finally {
+ if(null_sbuf == null) {
+ try {H5.H5Sclose(sid);} catch (Exception ex) {}
+ }
+ }
+
+ try {
+ decoded_sid = H5.H5Sdecode(null_sbuf);
+ assertTrue("H5.testH5Sdecode", decoded_sid>0);
+
+ result = H5.H5Sextent_equal(sid, decoded_sid);
+ assertTrue("H5.testH5Sextent_equal", result);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sdecode " + err);
+ }
+ finally {
+ try {H5.H5Sclose(decoded_sid);} catch (Exception ex) {}
+ try {H5.H5Sclose(sid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Sencode_decode_scalar_dataspace() {
+ long sid = -1;
+ long decoded_sid = -1;
+ byte[] scalar_sbuf = null;
+ boolean result = false;
+ int iresult = -1;
+ long lresult = -1;
+
+ try {
+ sid = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ assertTrue("H5.H5Screate_null", sid > 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Screate: null " + err);
+ }
+
+ try {
+ scalar_sbuf = H5.H5Sencode(sid);
+ assertFalse("H5.testH5Sencode", scalar_sbuf==null);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sencode " + err);
+ }
+ finally {
+ if(scalar_sbuf == null) {
+ try {H5.H5Sclose(sid);} catch (Exception ex) {}
+ }
+ }
+
+ try {
+ decoded_sid = H5.H5Sdecode(scalar_sbuf);
+ assertTrue("H5.testH5Sdecode", decoded_sid>0);
+
+ result = H5.H5Sextent_equal(sid, decoded_sid);
+ assertTrue("H5.testH5Sextent_equal", result);
+
+ /* Verify decoded dataspace */
+ lresult = H5.H5Sget_simple_extent_npoints(decoded_sid);
+ assertTrue("H5.testH5Sget_simple_extent_npoints", lresult==1);
+
+ iresult = H5.H5Sget_simple_extent_ndims(decoded_sid);
+ assertTrue("H5.testH5Sget_simple_extent_ndims", iresult==0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sdecode " + err);
+ }
+ finally {
+ try {H5.H5Sclose(decoded_sid);} catch (Exception ex) {}
+ try {H5.H5Sclose(sid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Sselect_none() {
+ int read_type = -1;
+ try {
+ H5.H5Sselect_none(H5sid);
+ read_type = H5.H5Sget_select_type(H5sid);
+ assertTrue("H5.H5Sget_select_type: "+read_type, HDF5Constants.H5S_SEL_NONE == read_type);
+ H5.H5Sselect_all(H5sid);
+ read_type = H5.H5Sget_select_type(H5sid);
+ assertTrue("H5.H5Sget_select_type: "+read_type, HDF5Constants.H5S_SEL_ALL == read_type);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sset_extent_none: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Sget_select_npoints() {
+ long coord[][] = {{0,1},{2,4},{5,6}}; /* Coordinates for point selection */
+ long num_elements = -1;
+ try {
+ H5.H5Sselect_elements(H5sid, HDF5Constants.H5S_SELECT_SET, 3, coord);
+ num_elements = H5.H5Sget_select_npoints(H5sid);
+ assertTrue("H5.H5Sget_select_npoints: "+num_elements, 3 == num_elements);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sget_select_npoints: " + err);
+ }
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Sget_select_elem_pointlist_invalid() throws Throwable {
+ long coord[][] = {{0,1},{2,4},{5,6}}; /* Coordinates for point selection */
+ long getcoord[] = {-1,-1}; /* Coordinates for get point selection */
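+        // getcoord can hold only one 2-D point while three points are requested below,
+        // which appears to be what makes this call invalid (compare the valid test,
+        // which allocates 3 * rank = 6 entries).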
+ try {
+ H5.H5Sselect_elements(H5sid, HDF5Constants.H5S_SELECT_SET, 3, coord);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sget_select_elem_pointlist: " + err);
+ }
+ H5.H5Sget_select_elem_pointlist(H5sid, 0, 3, getcoord);
+ }
+
+ @Test
+ public void testH5Sget_select_elem_pointlist() {
+ long coord[][] = {{0,1},{2,3},{4,5}}; /* Coordinates for point selection */
+ long getcoord[] = {-1,-1,-1,-1,-1,-1}; /* Coordinates for get point selection */
+ try {
+ H5.H5Sselect_elements(H5sid, HDF5Constants.H5S_SELECT_SET, 3, coord);
+ H5.H5Sget_select_elem_pointlist(H5sid, 0, 3, getcoord);
+ assertTrue("H5.H5Sget_select_elem_pointlist", coord[0][0] == getcoord[0]);
+ assertTrue("H5.H5Sget_select_elem_pointlist", coord[0][1] == getcoord[1]);
+ assertTrue("H5.H5Sget_select_elem_pointlist", coord[1][0] == getcoord[2]);
+ assertTrue("H5.H5Sget_select_elem_pointlist", coord[1][1] == getcoord[3]);
+ assertTrue("H5.H5Sget_select_elem_pointlist", coord[2][0] == getcoord[4]);
+ assertTrue("H5.H5Sget_select_elem_pointlist", coord[2][1] == getcoord[5]);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sget_select_elem_pointlist: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Sget_select_bounds() {
+ long lowbounds[] = {-1,-1};
+ long hibounds[] = {-1,-1};
+ try {
+ H5.H5Sget_select_bounds(H5sid, lowbounds, hibounds);
+ assertTrue("H5.H5Sget_select_bounds", 0 == lowbounds[0]);
+ assertTrue("H5.H5Sget_select_bounds", 0 == lowbounds[1]);
+ assertTrue("H5.H5Sget_select_bounds", (H5dims[0]-1) == hibounds[0]);
+ assertTrue("H5.H5Sget_select_bounds", (H5dims[1]-1) == hibounds[1]);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sget_select_bounds: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Soffset_simple() {
+ long coord[][] = {{2,2},{2,4},{4,2},{4,4}}; /* Coordinates for point selection */
+ long lowbounds[] = {-1,-1};
+ long hibounds[] = {-1,-1};
+ try {
+ H5.H5Sselect_elements(H5sid, HDF5Constants.H5S_SELECT_SET, 4, coord);
+ H5.H5Sget_select_bounds(H5sid, lowbounds, hibounds);
+ assertTrue("H5.H5Sget_select_bounds", 2 == lowbounds[0]);
+ assertTrue("H5.H5Sget_select_bounds", 2 == lowbounds[1]);
+ assertTrue("H5.H5Sget_select_bounds", (H5dims[0]-1) == hibounds[0]);
+ assertTrue("H5.H5Sget_select_bounds", (H5dims[1]-1) == hibounds[1]);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sget_select_bounds: " + err);
+ }
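+        // H5Soffset_simple shifts the whole selection by the given offset, so an offset
+        // of {-1,-1} should move the bounding box from (2,2)-(4,4) to (1,1)-(3,3).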
+ try {
+ long offset[] = {-1,-1};
+ H5.H5Soffset_simple(H5sid, offset);
+ H5.H5Sget_select_bounds(H5sid, lowbounds, hibounds);
+ assertTrue("H5.H5Sget_select_bounds", 1 == lowbounds[0]);
+ assertTrue("H5.H5Sget_select_bounds", 1 == lowbounds[1]);
+ assertTrue("H5.H5Sget_select_bounds", (H5dims[0]-2) == hibounds[0]);
+ assertTrue("H5.H5Sget_select_bounds", (H5dims[1]-2) == hibounds[1]);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Soffset_simple: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Sget_select_hyper() {
+ long space1 = -1;
+ long start[] = {0,0};
+ long stride[] = {1,1};
+ long count[] = {1,1};
+ long block[] = {4,4};
+ long nblocks; // Number of hyperslab blocks
+ long blocks[] = {-1, -1, -1, -1, -1, -1, -1, -1}; // List of blocks
+ try {
+ // Copy "all" selection & space
+ space1 = H5.H5Scopy(H5sid);
+ assertTrue("H5.H5Scopy", H5sid > 0);
+ // 'AND' "all" selection with another hyperslab
+ H5.H5Sselect_hyperslab(space1, HDF5Constants.H5S_SELECT_AND, start, stride, count, block);
+
+ // Verify that there is only one block
+ nblocks = H5.H5Sget_select_hyper_nblocks(space1);
+ assertTrue("H5Sget_select_hyper_nblocks", nblocks == 1);
+
+ // Retrieve the block defined
+ H5.H5Sget_select_hyper_blocklist(space1, 0, nblocks, blocks);
+
+ // Verify that the correct block is defined
+ assertTrue("H5.H5Sget_select_hyper_blocklist", start[0] == blocks[0]);
+ assertTrue("H5.H5Sget_select_hyper_blocklist", start[1] == blocks[1]);
+ assertTrue("H5.H5Sget_select_hyper_blocklist", (block[0]-1) == blocks[2]);
+ assertTrue("H5.H5Sget_select_hyper_blocklist", (block[1]-1) == blocks[3]);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Sget_select_bounds: " + err);
+ }
+ finally {
+ try {H5.H5Sclose(space1);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Sget_select_valid() {
+ long space1 = -1;
+ long start[] = {1,0};
+ long stride[] = {1,1};
+ long count[] = {2,3};
+ long block[] = {1,1};
+ long offset[] = {0,0}; // Offset of selection
+
+ try {
+ // Copy "all" selection & space
+ space1 = H5.H5Scopy(H5sid);
+ assertTrue("H5.H5Scopy", H5sid > 0);
+ // 'AND' "all" selection with another hyperslab
+ H5.H5Sselect_hyperslab(space1, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+
+ // Check a valid offset
+ offset[0]=-1;
+ offset[1]=0;
+ H5.H5Soffset_simple(space1, offset);
+ assertTrue("H5Sselect_valid", H5.H5Sselect_valid(space1));
+
+ // Check an invalid offset
+ offset[0]=10;
+ offset[1]=0;
+ H5.H5Soffset_simple(space1, offset);
+ assertFalse("H5Sselect_valid", H5.H5Sselect_valid(space1));
+
+ /* Reset offset */
+ offset[0]=0;
+ offset[1]=0;
+ H5.H5Soffset_simple(space1, offset);
+ assertTrue("H5Sselect_valid", H5.H5Sselect_valid(space1));
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Sget_select_valid: " + err);
+ }
+ finally {
+ try {H5.H5Sclose(space1);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Shyper_regular() {
+ long start[] = {1,0};
+ long stride[] = {1,1};
+ long count[] = {2,3};
+ long block[] = {1,1};
+ long q_start[] = new long[2];
+ long q_stride[] = new long[2];
+ long q_count[] = new long[2];
+ long q_block[] = new long[2];
+ boolean is_regular = false;
+
+ try {
+ // Set "regular" hyperslab selection
+ H5.H5Sselect_hyperslab(H5sid, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+
+ // Query if 'hyperslab' selection is regular hyperslab (should be TRUE)
+ is_regular = H5.H5Sis_regular_hyperslab(H5sid);
+ assertTrue("H5.H5Sis_regular_hyperslab", is_regular);
+
+ // Retrieve the hyperslab parameters
+ H5.H5Sget_regular_hyperslab(H5sid, q_start, q_stride, q_count, q_block);
+
+ /* Verify the hyperslab parameters */
+ for(int u = 0; u < H5rank; u++) {
+ assertTrue("H5Sget_regular_hyperslab, start", start[u] == q_start[u]);
+ assertTrue("H5Sget_regular_hyperslab, stride", stride[u] == q_stride[u]);
+ assertTrue("H5Sget_regular_hyperslab, count", count[u] == q_count[u]);
+ assertTrue("H5Sget_regular_hyperslab, block", block[u] == q_block[u]);
+ } /* end for */
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Sget_select_valid: " + err);
+ }
+ }
+}
diff --git a/java/test/TestH5Sbasic.java b/java/test/TestH5Sbasic.java
new file mode 100644
index 0000000..2731a06
--- /dev/null
+++ b/java/test/TestH5Sbasic.java
@@ -0,0 +1,247 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Sbasic {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void checkOpenIDs() {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ }
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test//(expected = HDF5LibraryException.class)
+ public void testH5Sclose_invalid() throws Throwable {
+ long sid = H5.H5Sclose(-1);
+ assertTrue(sid == 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Screate_invalid() throws Throwable {
+ H5.H5Screate(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Sget_simple_extent_type_invalid() throws Throwable {
+ H5.H5Sget_simple_extent_type(-1);
+ }
+
+ @Test
+ public void testH5Screate_scalar() {
+ long sid = -1;
+ int class_type = -1;
+ try {
+ sid = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ assertTrue("H5.H5Screate_scalar",sid > 0);
+ class_type = H5.H5Sget_simple_extent_type(sid);
+ assertTrue("H5.H5Screate_scalar: type",class_type == HDF5Constants.H5S_SCALAR);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Screate: " + err);
+ }
+ finally {
+ try {H5.H5Sclose(sid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Screate_null() {
+ long sid = -1;
+ int class_type = -1;
+ try {
+ sid = H5.H5Screate(HDF5Constants.H5S_NULL);
+ assertTrue("H5.H5Screate_null", sid > 0);
+ class_type = H5.H5Sget_simple_extent_type(sid);
+ assertTrue("H5.H5Screate_null: type", class_type == HDF5Constants.H5S_NULL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Screate: " + err);
+ }
+ finally {
+ try {H5.H5Sclose(sid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Screate_simple_dims_null() throws Throwable {
+ H5.H5Screate_simple(2, (long[])null, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Screate_simple_rank_invalid() throws Throwable {
+ long dims[] = {5, 5};
+ H5.H5Screate_simple(-1, dims, null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Screate_simple_dims_invalid() throws Throwable {
+ long dims[] = {2, 2};
+ H5.H5Screate_simple(5, dims, null);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Screate_simple_dims_exceed() throws Throwable {
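+        // The HDF5 library limits dataspace rank to H5S_MAX_RANK (32), so a rank of 35
+        // is expected to raise an HDF5LibraryException.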
+ long dims[] = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,
+ 21,22,23,24,25,26,27,28,29,30,31,32,33,35};
+ H5.H5Screate_simple(35, dims, null);
+ }
+
+//H5Screate_simple was changed to allow a dim of 0
+// @Ignore(expected = HDF5LibraryException.class)
+// public void testH5Screate_simple_dims_zero() {
+// long dims[] = {0, 0};
+// H5.H5Screate_simple(2, dims, null);
+// }
+
+ @Test
+ public void testH5Screate_simple() {
+ long sid = -1;
+ int class_type = -1;
+ int rank = 2;
+ long dims[] = {5, 5};
+ long maxdims[] = {10, 10};
+
+ try {
+ sid = H5.H5Screate_simple(rank, dims, maxdims);
+ assertTrue("H5.H5Screate_simple", sid > 0);
+ class_type = H5.H5Sget_simple_extent_type(sid);
+ assertTrue("H5.H5Screate_simple: type", class_type == HDF5Constants.H5S_SIMPLE);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Screate_simple: " + err);
+ }
+ finally {
+ try {H5.H5Sclose(sid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+    public void testH5Screate_simple_unlimited() {
+ long sid = -1;
+ int class_type = -1;
+ int rank = 2;
+ long dims[] = {5, 5};
+ long maxdims[] = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
+
+ try {
+ sid = H5.H5Screate_simple(rank, dims, maxdims);
+ assertTrue("H5.H5Screate_simple", sid > 0);
+ class_type = H5.H5Sget_simple_extent_type(sid);
+ assertTrue("H5.H5Screate_simple: type", class_type == HDF5Constants.H5S_SIMPLE);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Screate_simple: " + err);
+ }
+ finally {
+ try {H5.H5Sclose(sid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+    public void testH5Screate_simple_unlimited_1d() {
+ long sid = -1;
+ int class_type = -1;
+ int rank = 1;
+ long dims[] = {5};
+ long maxdims[] = {HDF5Constants.H5S_UNLIMITED};
+
+ try {
+ sid = H5.H5Screate_simple(rank, dims, maxdims);
+ assertTrue("H5.H5Screate_simple", sid > 0);
+ class_type = H5.H5Sget_simple_extent_type(sid);
+ assertTrue("H5.H5Screate_simple: type", class_type == HDF5Constants.H5S_SIMPLE);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Screate_simple: " + err);
+ }
+ finally {
+ try {H5.H5Sclose(sid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Screate_simple_max_default() {
+ long sid = -1;
+ int rank = 2;
+ long dims[] = {5, 5};
+
+ try {
+ sid = H5.H5Screate_simple(rank, dims, null);
+ assertTrue("H5.H5Screate_simple_max_default", sid > 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Screate_simple: " + err);
+ }
+ finally {
+ try {H5.H5Sclose(sid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Screate_simple_extent() {
+ long sid = -1;
+ int rank = 2;
+ long dims[] = {5, 5};
+ long maxdims[] = {10, 10};
+
+ try {
+ sid = H5.H5Screate(HDF5Constants.H5S_SIMPLE);
+ assertTrue("H5.H5Screate_simple_extent",sid > 0);
+ H5.H5Sset_extent_simple(sid, rank, dims, maxdims);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Screate: " + err);
+ }
+ finally {
+ try {H5.H5Sclose(sid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Sencode_invalid() throws Throwable {
+ H5.H5Sencode(-1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Sdecode_null() throws Throwable {
+ H5.H5Sdecode(null);
+ }
+
+}
diff --git a/java/test/TestH5T.java b/java/test/TestH5T.java
new file mode 100644
index 0000000..e03b97f
--- /dev/null
+++ b/java/test/TestH5T.java
@@ -0,0 +1,459 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5T {
+ @Rule public TestName testname = new TestName();
+ private static final String H5_FILE = "test.h5";
+ long H5fid = -1;
+ long H5strdid = -1;
+
+ private final void _deleteFile(String filename) {
+        File file = new File(filename);
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ @Before
+ public void createH5file() throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0", H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ assertTrue("H5.H5Fcreate", H5fid > 0);
+ H5strdid = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ assertTrue("H5.H5Tcopy", H5strdid > 0);
+
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+
+ @After
+ public void deleteH5file() throws HDF5LibraryException {
+ if (H5strdid >= 0)
+ try {H5.H5Tclose(H5strdid);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+
+ _deleteFile(H5_FILE);
+ System.out.println();
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tequal_type_error() throws Throwable {
+ H5.H5Tequal(HDF5Constants.H5T_INTEGER, H5strdid);
+ }
+
+ @Test
+ public void testH5Tget_class() {
+ try {
+ int result = H5.H5Tget_class(H5strdid);
+ assertTrue("H5.H5Tget_class", result > 0);
+ String class_name = H5.H5Tget_class_name(result);
+ assertTrue("H5.H5Tget_class", class_name.compareTo("H5T_STRING")==0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tget_class: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Tget_size() {
+ long dt_size = -1;
+
+ try {
+ dt_size = H5.H5Tget_size(H5strdid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tget_size:H5.H5Tget_size " + err);
+ }
+ assertTrue("testH5Tget_size", dt_size > 0);
+ }
+
+ @Test
+ public void testH5Tset_size() {
+ long dt_size = 5;
+
+ try {
+ H5.H5Tset_size(H5strdid, dt_size);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tset_size:H5.H5Tset_size " + err);
+ }
+ try {
+ dt_size = H5.H5Tget_size(H5strdid);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tget_size:H5.H5Tget_size " + err);
+ }
+ assertTrue("testH5Tget_size", dt_size == 5);
+ }
+
+ @Test
+ public void testH5Tarray_create() {
+ long filetype_id = -1;
+ long[] adims = { 3, 5 };
+
+ try {
+ filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, 2, adims);
+ assertTrue("testH5Tarray_create", filetype_id >= 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tarray_create.H5Tarray_create " + err);
+ }
+ finally {
+ if (filetype_id >= 0)
+ try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Tget_array_ndims() {
+ long filetype_id = -1;
+ int ndims = 0;
+ long[] adims = { 3, 5 };
+
+ try {
+ filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, 2, adims);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tarray_create.H5Tarray_create " + err);
+ }
+ assertTrue("testH5Tget_array_ndims:H5Tarray_create", filetype_id >= 0);
+ try {
+ ndims = H5.H5Tget_array_ndims(filetype_id);
+ assertTrue("testH5Tget_array_ndims", ndims == 2);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tget_array_ndims.H5Tget_array_ndims " + err);
+ }
+ finally {
+ if (filetype_id >= 0)
+ try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Tget_array_dims() {
+ long filetype_id = -1;
+ int ndims = 0;
+ long[] adims = { 3, 5 };
+ long[] rdims = new long[2];
+
+ try {
+ filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, 2, adims);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tarray_create.H5Tarray_create " + err);
+ }
+ assertTrue("testH5Tget_array_dims:H5Tarray_create", filetype_id >= 0);
+ try {
+ ndims = H5.H5Tget_array_dims(filetype_id, rdims);
+ assertTrue("testH5Tget_array_dims", ndims == 2);
+ assertTrue("testH5Tget_array_dims", adims[0] == rdims[0]);
+ assertTrue("testH5Tget_array_dims", adims[1] == rdims[1]);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tget_array_dims.H5Tget_array_dims " + err);
+ }
+ finally {
+ if (filetype_id >= 0)
+ try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Tenum_functions() {
+ long filetype_id =-1;
+ String enum_type ="Enum_type";
+ byte[] enum_val = new byte[1];
+ String enum_name = null;
+
+        // Create an enumerated datatype
+ try {
+ filetype_id = H5.H5Tcreate(HDF5Constants.H5T_ENUM, (long)1);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tenum_functions:H5Tcreate " + err);
+ }
+ assertTrue("testH5Tenum_functions:H5Tcreate", filetype_id >= 0);
+ try {
+ enum_val[0]=10;
+ H5.H5Tenum_insert(filetype_id, "RED", enum_val);
+ enum_val[0]=11;
+ H5.H5Tenum_insert(filetype_id, "GREEN", enum_val);
+ enum_val[0]=12;
+ H5.H5Tenum_insert(filetype_id, "BLUE", enum_val);
+ enum_val[0]=13;
+ H5.H5Tenum_insert(filetype_id, "ORANGE", enum_val);
+ enum_val[0]=14;
+ H5.H5Tenum_insert(filetype_id, "YELLOW", enum_val);
+
+ // Query member number and member index by member name, for enumeration type.
+ assertTrue("Can't get member number", H5.H5Tget_nmembers(filetype_id) == 5);
+ assertTrue("Can't get correct index number", H5.H5Tget_member_index(filetype_id, "ORANGE") == 3);
+
+            // Commit the enumeration datatype and close it
+ H5.H5Tcommit(H5fid, enum_type, filetype_id, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
+ H5.H5Tclose(filetype_id);
+
+            // Open the datatype for query
+ filetype_id = H5.H5Topen(H5fid, enum_type, HDF5Constants.H5P_DEFAULT);
+ assertTrue("testH5Tenum_functions:H5Tcreate", filetype_id >= 0);
+
+ // Query member number and member index by member name, for enumeration type
+ assertTrue("Can't get member number", H5.H5Tget_nmembers(filetype_id) == 5);
+ assertTrue("Can't get correct index number", H5.H5Tget_member_index(filetype_id, "ORANGE") == 3);
+
+ // Query member value by member name, for enumeration type
+ H5.H5Tenum_valueof (filetype_id, "ORANGE", enum_val);
+ assertTrue("Incorrect value for enum member", enum_val[0]==13);
+
+ // Query member value by member index, for enumeration type
+ H5.H5Tget_member_value (filetype_id, 2, enum_val);
+ assertTrue("Incorrect value for enum member", enum_val[0]==12);
+
+ // Query member name by member value, for enumeration type
+ enum_val[0] = 14;
+ enum_name = H5.H5Tenum_nameof(filetype_id, enum_val, 16);
+ assertTrue("Incorrect name for enum member", enum_name.compareTo("YELLOW")==0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tenum_functions:query " + err);
+ }
+ finally {
+ if (filetype_id >= 0)
+ try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Tenum_create_functions() {
+ long filetype_id = -1;
+ byte[] enum_val = new byte[1];
+
+        // Create an enumerated datatype
+ try {
+ filetype_id = H5.H5Tenum_create(HDF5Constants.H5T_NATIVE_INT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tenum_create_functions:H5Tcreate " + err);
+ }
+ assertTrue("testH5Tenum_create_functions:H5Tcreate", filetype_id >= 0);
+ try {
+ enum_val[0]=10;
+ H5.H5Tenum_insert(filetype_id, "RED", enum_val);
+ enum_val[0]=11;
+ H5.H5Tenum_insert(filetype_id, "GREEN", enum_val);
+ enum_val[0]=12;
+ H5.H5Tenum_insert(filetype_id, "BLUE", enum_val);
+ enum_val[0]=13;
+ H5.H5Tenum_insert(filetype_id, "ORANGE", enum_val);
+ enum_val[0]=14;
+ H5.H5Tenum_insert(filetype_id, "YELLOW", enum_val);
+
+ // Query member number and member index by member name, for enumeration type.
+ assertTrue("Can't get member number", H5.H5Tget_nmembers(filetype_id) == 5);
+ assertTrue("Can't get correct index number", H5.H5Tget_member_index(filetype_id, "ORANGE") == 3);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tenum_create_functions:H5Tget_nmembers " + err);
+ }
+ finally {
+ if (filetype_id >= 0)
+ try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Topaque_functions() {
+ long filetype_id = -1;
+ String opaque_name = null;
+
+        // Create an opaque datatype
+ try {
+ filetype_id = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, (long)4);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Topaque_functions:H5Tcreate " + err);
+ }
+ assertTrue("testH5Topaque_functions:H5Tcreate", filetype_id >= 0);
+
+ try {
+ H5.H5Tset_tag(filetype_id, "opaque type");
+ opaque_name = H5.H5Tget_tag(filetype_id);
+ assertTrue("Incorrect tag for opaque type", opaque_name.compareTo("opaque type")==0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Topaque_functions:H5Tset_get_tag " + err);
+ }
+ finally {
+ if (filetype_id >= 0)
+ try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Tvlen_create() {
+ long filetype_id = -1;
+
+ try {
+ filetype_id = H5.H5Tvlen_create(HDF5Constants.H5T_C_S1);
+ assertTrue("testH5Tvlen_create", filetype_id >= 0);
+
+ // Check if datatype is VL type
+ int vlclass = H5.H5Tget_class(filetype_id);
+ assertTrue("testH5Tvlen_create:H5Tget_class", vlclass == HDF5Constants.H5T_VLEN);
+ assertFalse("testH5Tis_variable_str:H5Tget_class", vlclass == HDF5Constants.H5T_STRING);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tvlen_create.H5Tvlen_create " + err);
+ }
+ finally {
+ if (filetype_id >= 0)
+ try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Tis_variable_str() {
+ long filetype_id = -1;
+
+ try {
+ filetype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ assertTrue("testH5Tis_variable_str.H5Tcopy: ", filetype_id >= 0);
+
+ // Convert to variable-length string
+ H5.H5Tset_size(filetype_id, HDF5Constants.H5T_VARIABLE);
+
+ // Check if datatype is VL string
+ int vlclass = H5.H5Tget_class(filetype_id);
+ assertTrue("testH5Tis_variable_str:H5Tget_class", vlclass == HDF5Constants.H5T_STRING);
+ assertFalse("testH5Tvlen_create:H5Tget_class", vlclass == HDF5Constants.H5T_VLEN);
+
+ assertTrue("testH5Tis_variable_str:H5Tis_variable_str", H5.H5Tis_variable_str(filetype_id));
+
+ // Verify that the string class is detected
+ assertTrue("testH5Tis_variable_str:H5Tdetect_class", H5.H5Tdetect_class(filetype_id, HDF5Constants.H5T_STRING));
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tis_variable_str " + err);
+ }
+ finally {
+ if (filetype_id >= 0)
+ try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Tcompound_functions() {
+ long filetype_id =-1;
+
+ // Create a compound datatype
+ try {
+ filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, (long)16);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tcompound_functions:H5Tcreate " + err);
+ }
+ assertTrue("testH5Tcompound_functions:H5Tcreate", filetype_id >= 0);
+ try {
+ H5.H5Tinsert(filetype_id, "Lon", 0, HDF5Constants.H5T_NATIVE_DOUBLE);
+ H5.H5Tinsert(filetype_id, "Lat", 8, HDF5Constants.H5T_NATIVE_DOUBLE);
+
+ // Query member number and member index by member name, for compound type.
+ assertTrue("Can't get member number", H5.H5Tget_nmembers(filetype_id) == 2);
+ assertTrue("Can't get correct index number", H5.H5Tget_member_index(filetype_id, "Lat") == 1);
+
+ // H5Tget_order has been supported for compound types since the 1.8.6 release.
+ int order = H5.H5Tget_order(filetype_id);
+ assertFalse("Can't get order for compound type.", order == HDF5Constants.H5T_ORDER_ERROR);
+ assertTrue("Wrong order for this type.", (order == HDF5Constants.H5T_ORDER_LE) || (order == HDF5Constants.H5T_ORDER_BE));
+
+ // Make certain that the correct classes can be detected
+ assertTrue("Can't get correct class", H5.H5Tdetect_class(filetype_id, HDF5Constants.H5T_COMPOUND));
+ assertTrue("Can't get correct class", H5.H5Tdetect_class(filetype_id, HDF5Constants.H5T_FLOAT));
+ // Make certain that an incorrect class is not detected
+ assertFalse("Can get incorrect class", H5.H5Tdetect_class(filetype_id, HDF5Constants.H5T_TIME));
+
+ // Query member name by member index
+ String index_name = H5.H5Tget_member_name (filetype_id, 0);
+ assertTrue("Incorrect name for member index", index_name.compareTo("Lon")==0);
+
+ // Query member offset by member index
+ long index_offset = H5.H5Tget_member_offset (filetype_id, 1);
+ assertTrue("Incorrect offset for member no", index_offset == 8);
+
+ // Query member type by member index
+ long index_type = H5.H5Tget_member_type (filetype_id, 0);
+ assertTrue("Incorrect type for member index", H5.H5Tequal(HDF5Constants.H5T_NATIVE_DOUBLE, index_type));
+ if (index_type >= 0)
+ try {H5.H5Tclose(index_type);} catch (Exception ex) {}
+
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tcompound_functions:query " + err);
+ }
+ finally {
+ if (filetype_id >= 0)
+ try {H5.H5Tclose(filetype_id);} catch (Exception ex) {}
+ }
+ }
+
+}
diff --git a/java/test/TestH5Tbasic.java b/java/test/TestH5Tbasic.java
new file mode 100644
index 0000000..950f1c7
--- /dev/null
+++ b/java/test/TestH5Tbasic.java
@@ -0,0 +1,161 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Tbasic {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void checkOpenIDs() {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ }
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Tcopy() {
+ long H5strdid = -1;
+ try {
+ H5strdid = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ assertTrue("H5.H5Tcopy",H5strdid > 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tcopy: " + err);
+ }
+ finally {
+ if (H5strdid >= 0)
+ try {H5.H5Tclose(H5strdid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Tequal() {
+ long H5strdid = -1;
+ try {
+ H5strdid = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ assertTrue("H5.H5Tcopy",H5strdid > 0);
+ boolean teq = H5.H5Tequal(HDF5Constants.H5T_C_S1, H5strdid);
+ assertTrue("H5.H5Tequal",teq);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tequal: " + err);
+ }
+ finally {
+ if (H5strdid >= 0)
+ try {H5.H5Tclose(H5strdid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Tequal_not() {
+ long H5strdid = -1;
+ try {
+ H5strdid = H5.H5Tcopy(HDF5Constants.H5T_STD_U64LE);
+ assertTrue("H5.H5Tcopy",H5strdid > 0);
+ boolean teq = H5.H5Tequal(HDF5Constants.H5T_IEEE_F32BE, H5strdid);
+ assertFalse("H5.H5Tequal",teq);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tequal_not: " + err);
+ }
+ finally {
+ if (H5strdid >= 0)
+ try {H5.H5Tclose(H5strdid);} catch (Exception ex) {}
+ }
+ }
+
+ @Test
+ public void testH5Tconvert() {
+ String[] strs = {"a1234","b1234"};
+ int srcLen = 5;
+ int dstLen = 10;
+ long srcId = -1;
+ long dstId = -1;
+ int dimSize = strs.length;
+ byte[] buf = new byte[dimSize*dstLen];
+
+ for (int i=0; i<dimSize; i++)
+ System.arraycopy(strs[i].getBytes(), 0, buf, i*srcLen, 5);
+
+ try {
+ srcId = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ H5.H5Tset_size(srcId, (long)srcLen);
+
+ dstId = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ H5.H5Tset_size(dstId, (long)dstLen);
+
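+ // Convert the buffer in place from the 5-byte source string type to the 10-byte destination string type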
+ H5.H5Tconvert(srcId, dstId, dimSize, buf, null, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Tconvert: " + err);
+ }
+ finally {
+ try {H5.H5Tclose(srcId);} catch (Exception ex) {}
+ try {H5.H5Tclose(dstId);} catch (Exception ex) {}
+ }
+
+ for (int i=0; i<strs.length; i++) {
+ assertTrue((new String(buf, i*dstLen, dstLen)).startsWith(strs[i]));
+ }
+ }
+
+ @Test
+ public void testH5Torder_size() {
+ long H5strdid = -1;
+ try {
+ // Fixed length string
+ H5strdid = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ assertTrue("H5.H5Tcopy",H5strdid > 0);
+ H5.H5Tset_size(H5strdid, (long)5);
+ assertTrue(HDF5Constants.H5T_ORDER_NONE == H5.H5Tget_order(H5strdid));
+ H5.H5Tset_order(H5strdid, HDF5Constants.H5T_ORDER_NONE);
+ assertTrue(HDF5Constants.H5T_ORDER_NONE == H5.H5Tget_order(H5strdid));
+ assertTrue(5 == H5.H5Tget_size(H5strdid));
+
+ // Variable length string
+ H5.H5Tset_size(H5strdid, HDF5Constants.H5T_VARIABLE);
+ H5.H5Tset_order(H5strdid, HDF5Constants.H5T_ORDER_BE);
+ assertTrue(HDF5Constants.H5T_ORDER_BE == H5.H5Tget_order(H5strdid));
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Torder: " + err);
+ }
+ finally {
+ if (H5strdid >= 0)
+ try {H5.H5Tclose(H5strdid);} catch (Exception ex) {}
+ }
+ }
+}
diff --git a/java/test/TestH5Tparams.java b/java/test/TestH5Tparams.java
new file mode 100644
index 0000000..15cc6af
--- /dev/null
+++ b/java/test/TestH5Tparams.java
@@ -0,0 +1,389 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertTrue;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Tparams {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void checkOpenIDs() {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ }
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
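+ // Negative tests: each call below is made with an invalid identifier (-1) or a null argument
+ // and is expected to fail with the exception named in the @Test annotation.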
+ @Test//(expected = HDF5LibraryException.class)
+ public void testH5Tclose_invalid() throws Throwable {
+ long tid = H5.H5Tclose(-1);
+ assertTrue(tid == 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tcopy_invalid() throws Throwable {
+ H5.H5Tcopy(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tequal_invalid() throws Throwable {
+ H5.H5Tequal(-1, -1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tlock_invalid() throws Throwable {
+ H5.H5Tlock(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_class_invalid() throws Throwable {
+ H5.H5Tget_class(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_size_invalid() throws Throwable {
+ H5.H5Tget_size(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tset_size_invalid() throws Throwable {
+ H5.H5Tset_size(-1, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_order_invalid() throws Throwable {
+ H5.H5Tget_order(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tset_order_invalid() throws Throwable {
+ H5.H5Tset_order(-1, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_precision_invalid() throws Throwable {
+ H5.H5Tget_precision(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_precision_long_invalid() throws Throwable {
+ H5.H5Tget_precision_long(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tset_precision_invalid() throws Throwable {
+ H5.H5Tset_precision(-1, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_offset_invalid() throws Throwable {
+ H5.H5Tget_offset(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tset_offset_invalid() throws Throwable {
+ H5.H5Tset_offset(-1, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tcreate_invalid() throws Throwable {
+ H5.H5Tcreate(-1, (long)0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Topen_null() throws Throwable {
+ H5.H5Topen(-1, null, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Topen_invalid() throws Throwable {
+ H5.H5Topen(-1, "Bogus", 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Tcommit_null() throws Throwable {
+ H5.H5Tcommit(-1, null, 0, -1, -1, -1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tcommit_invalid() throws Throwable {
+ H5.H5Tcommit(-1, "Bogus", -1, -1, -1, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Tget_pad_null() throws Throwable {
+ H5.H5Tget_pad(-1, null);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_pad_invalid() throws Throwable {
+ int[] pad = new int[2];
+ H5.H5Tget_pad(-1, pad);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tset_pad_invalid() throws Throwable {
+ H5.H5Tset_pad(-1, -1, -1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_sign_invalid() throws Throwable {
+ H5.H5Tget_sign(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tset_sign_invalid() throws Throwable {
+ H5.H5Tset_sign(-1, 0);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Tget_fields_null() throws Throwable {
+ H5.H5Tget_fields(-1, (long[])null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Tget_fields_length_invalid() throws Throwable {
+ long[] fields = new long[2];
+ H5.H5Tget_fields(-1, fields);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_fields_invalid() throws Throwable {
+ long[] fields = new long[5];
+ H5.H5Tget_fields(-1, fields);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tset_fields_invalid() throws Throwable {
+ H5.H5Tset_fields(-1, -1, -1, -1, -1, -1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_ebias_invalid() throws Throwable {
+ H5.H5Tget_ebias(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_ebias_long_invalid() throws Throwable {
+ H5.H5Tget_ebias_long(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tset_ebias_invalid() throws Throwable {
+ H5.H5Tset_ebias(-1, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_norm_invalid() throws Throwable {
+ H5.H5Tget_norm(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tset_norm_invalid() throws Throwable {
+ H5.H5Tset_norm(-1, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_inpad_invalid() throws Throwable {
+ H5.H5Tget_inpad(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tset_inpad_invalid() throws Throwable {
+ H5.H5Tset_inpad(-1, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_cset_invalid() throws Throwable {
+ H5.H5Tget_cset(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tset_cset_invalid() throws Throwable {
+ H5.H5Tset_cset(-1, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_strpad_invalid() throws Throwable {
+ H5.H5Tget_strpad(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tset_strpad_invalid() throws Throwable {
+ H5.H5Tset_strpad(-1, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_nmembers_invalid() throws Throwable {
+ H5.H5Tget_nmembers(-1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Tget_member_index_null() throws Throwable {
+ H5.H5Tget_member_index(-1, null);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_member_index_invalid() throws Throwable {
+ H5.H5Tget_member_index(-1, "Bogus");
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_member_type_invalid() throws Throwable {
+ H5.H5Tget_member_type(-1, -1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_member_class_invalid() throws Throwable {
+ H5.H5Tget_member_class(-1, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Tinsert_null() throws Throwable {
+ H5.H5Tinsert(-1, null, 0, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tinsert_invalid() throws Throwable {
+ H5.H5Tinsert(-1, "Bogus", 0, 0);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tpack_invalid() throws Throwable {
+ H5.H5Tpack(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tvlen_create_invalid() throws Throwable {
+ H5.H5Tvlen_create(-1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Tset_tag_null() throws Throwable {
+ H5.H5Tset_tag(-1, null);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tset_tag_invalid() throws Throwable {
+ H5.H5Tset_tag(-1, "Bogus");
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_super_invalid() throws Throwable {
+ H5.H5Tget_super(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tenum_create_invalid() throws Throwable {
+ H5.H5Tenum_create(-1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Tenum_insert_name_null() throws Throwable {
+ H5.H5Tenum_insert(-1, null, (byte[])null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Tenum_insert_null() throws Throwable {
+ H5.H5Tenum_insert(-1, "bogus", (byte[])null);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tenum_insert_invalid() throws Throwable {
+ byte[] enumtype = new byte[2];
+ H5.H5Tenum_insert(-1, "bogus", enumtype);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Tenum_nameof_invalid_size() throws Throwable {
+ H5.H5Tenum_nameof(-1, null, -1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Tenum_nameof_value_null() throws Throwable {
+ H5.H5Tenum_nameof(-1, null, 1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tenum_nameof_invalid() throws Throwable {
+ byte[] btype = new byte[2];
+ H5.H5Tenum_nameof(-1, btype, 1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Tenum_valueof_name_null() throws Throwable {
+ H5.H5Tenum_valueof(-1, null, (byte[])null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Tenum_valueof_null() throws Throwable {
+ H5.H5Tenum_valueof(-1, "bogus", (byte[])null);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tenum_valueof_invalid() throws Throwable {
+ byte[] btype = new byte[2];
+ H5.H5Tenum_valueof(-1, "bogus", btype);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Tget_member_value_null() throws Throwable {
+ H5.H5Tget_member_value(-1, -1, (byte[])null);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_member_value_invalid() throws Throwable {
+ byte[] btype = new byte[2];
+ H5.H5Tget_member_value(-1, -1, btype);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testH5Tarray_create_invalid() throws Throwable {
+ H5.H5Tarray_create(-1, -1, null);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Tarray_create_value_null() throws Throwable {
+ H5.H5Tarray_create(-1, 1, null);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_array_ndims_invalid() throws Throwable {
+ H5.H5Tget_array_ndims(-1);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void testH5Tget_array_dims_null() throws Throwable {
+ H5.H5Tget_array_dims(-1, null);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Tget_native_type_invalid() throws Throwable {
+ H5.H5Tget_native_type(-1);
+ }
+
+}
diff --git a/java/test/TestH5Z.java b/java/test/TestH5Z.java
new file mode 100644
index 0000000..bdf3f1d
--- /dev/null
+++ b/java/test/TestH5Z.java
@@ -0,0 +1,100 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Z {
+ @Rule public TestName testname = new TestName();
+
+ @Before
+ public void checkOpenIDs() {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ }
+ @After
+ public void nextTestName() {
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Zfilter_avail() {
+ try {
+ int filter_found = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
+ assertTrue("H5.H5Zfilter_avail_DEFLATE", filter_found > 0);
+ filter_found = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_FLETCHER32);
+ assertTrue("H5.H5Zfilter_avail_FLETCHER32", filter_found > 0);
+ filter_found = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_NBIT);
+ assertTrue("H5.H5Zfilter_avail_NBIT", filter_found > 0);
+ filter_found = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+ assertTrue("H5.H5Zfilter_avail_SCALEOFFSET", filter_found > 0);
+ filter_found = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SHUFFLE);
+ assertTrue("H5.H5Zfilter_avail_SHUFFLE", filter_found > 0);
+// filter_found = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SZIP);
+// assertTrue("H5.H5Zfilter_avail_SZIP", filter_found > 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Zfilter_avail " + err);
+ }
+ }
+
+ @Test
+ public void testH5Zget_filter_info() {
+ try {
+ int filter_flag = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+ assertTrue("H5.H5Zget_filter_info_DEFLATE_DECODE_ENABLED", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) > 0);
+ assertTrue("H5.H5Zget_filter_info_DEFLATE_ENCODE_ENABLED", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) > 0);
+ filter_flag = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_FLETCHER32);
+ assertTrue("H5.H5Zget_filter_info_FLETCHER32_DECODE_ENABLED", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) > 0);
+ assertTrue("H5.H5Zget_filter_info_FLETCHER32_ENCODE_ENABLED", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) > 0);
+ filter_flag = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_NBIT);
+ assertTrue("H5.H5Zget_filter_info_NBIT_DECODE_ENABLED", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) > 0);
+ assertTrue("H5.H5Zget_filter_info_NBIT_ENCODE_ENABLED", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) > 0);
+ filter_flag = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+ assertTrue("H5.H5Zget_filter_info_SCALEOFFSET_DECODE_ENABLED", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) > 0);
+ assertTrue("H5.H5Zget_filter_info_SCALEOFFSET_ENCODE_ENABLED", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) > 0);
+ filter_flag = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SHUFFLE);
+ assertTrue("H5.H5Zget_filter_info_DECODE_SHUFFLE_ENABLED", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) > 0);
+ assertTrue("H5.H5Zget_filter_info_ENCODE_SHUFFLE_ENABLED", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) > 0);
+// filter_flag = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SZIP);
+// assertTrue("H5.H5Zget_filter_info_DECODE_SZIP_ENABLED", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) > 0);
+// assertTrue("H5.H5Zget_filter_info_ENCODE_SZIP_ENABLED", (filter_flag & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) > 0);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Zget_filter_info " + err);
+ }
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Zunregister_predefined() throws Throwable {
+ int filter_found = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SHUFFLE);
+ assertTrue("H5.H5Zfilter_avail", filter_found > 0);
+
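+ // Unregistering a predefined filter is not allowed and must raise HDF5LibraryException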
+ H5.H5Zunregister(HDF5Constants.H5Z_FILTER_SHUFFLE);
+ }
+}
diff --git a/java/test/h5ex_g_iterate.orig b/java/test/h5ex_g_iterate.orig
new file mode 100644
index 0000000..e462703
--- /dev/null
+++ b/java/test/h5ex_g_iterate.orig
Binary files differ
diff --git a/java/test/junit.sh.in b/java/test/junit.sh.in
new file mode 100644
index 0000000..698dc5d
--- /dev/null
+++ b/java/test/junit.sh.in
@@ -0,0 +1,255 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+
+top_builddir=@top_builddir@
+top_srcdir=@top_srcdir@
+srcdir=@srcdir@
+
+TESTNAME=JUnitInterface
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+
+# Set up default variable values if not supplied by the user.
+RM='rm -rf'
+CMP='cmp'
+DIFF='diff -c'
+CP='cp'
+DIRNAME='dirname'
+LS='ls'
+AWK='awk'
+
+nerrors=0
+verbose=yes
+
+# where the libs exist
+HDFLIB_HOME="$top_srcdir/java/lib"
+BLDLIBDIR="$top_builddir/java/lib"
+BLDDIR="."
+HDFTEST_HOME="$top_srcdir/java/test"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@test.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# Copy all the library files from the source directory to the test directory.
+# NOTE: Keep this framework to add/remove test files.
+# This list is also used for checking existence.
+# An entry can be commented out by prefixing it with '#' (no leading space).
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$HDFLIB_HOME/hamcrest-core.jar
+$HDFLIB_HOME/junit.jar
+$HDFLIB_HOME/slf4j-api-1.7.5.jar
+$HDFLIB_HOME/ext/slf4j-simple-1.7.5.jar
+$top_builddir/src/.libs/libhdf5.*
+$top_builddir/java/src/jni/.libs/libhdf5_java.*
+$top_builddir/java/src/$JARFILE
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/JUnit-interface.txt
+$HDFTEST_HOME/JUnit-interface.ert
+"
+
+expect="JUnit-interface.txt"
+actual="JUnit-interface.out"
+actual_err="JUnit-interface.err"
+actual_ext="JUnit-interface.ext"
+
+#
+# copy files from source dirs to test dir
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+ # Copy the test files. Use -f to make sure we get a new copy.
+ for tstfile in $COPY_LIBFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip cp if srcdir is the same as destdir; this occurs
+ # when the build/test is performed in the source dir and
+ # would make cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment this out to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+ # Skip rm if srcdir is the same as destdir; this occurs
+ # when the build/test is performed in the source dir and
+ # rm would remove the source files.
+ SDIR=`$DIRNAME $HDFLIB_HOME/junit.jar`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDLIBDIR
+ fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+ # Copy the test files. Use -f to make sure we get a new copy.
+ for tstfile in $COPY_DATAFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip cp if srcdir is the same as destdir; this occurs
+ # when the build/test is performed in the source dir and
+ # would make cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment this out to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+ $CP -f $HDFTEST_HOME/h5ex_g_iterate.orig $BLDDIR/h5ex_g_iterate.hdf
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+ $RM $BLDDIR/h5ex_g_iterate.hdf
+ $RM $BLDDIR/JUnit-interface.out
+ $RM $BLDDIR/JUnit-interface.err
+ $RM $BLDDIR/JUnit-interface.ext
+ $RM $BLDDIR/JUnit-interface.txt
+}
+
+# Print a one-line message left-justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+JAVAEXEFLAGS=@H5_JAVAFLAGS@
+
+###############################################################################
+# DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+
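+# Build the Java classpath: the current dir, the HDF5 JAR, JUnit, Hamcrest, SLF4J and the test JAR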
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$BLDLIBDIR"/junit.jar:"$BLDLIBDIR"/hamcrest-core.jar:"$BLDLIBDIR"/slf4j-api-1.7.5.jar:"$BLDLIBDIR"/slf4j-simple-1.7.5.jar:"$TESTJARFILE""
+
+TEST=/usr/bin/test
+if [ ! -x /usr/bin/test ]
+then
+TEST=`which test`
+fi
+
+if $TEST -z "$CLASSPATH"; then
+ CLASSPATH=""
+fi
+CLASSPATH=$CPATH":"$CLASSPATH
+export CLASSPATH
+
+if $TEST -n "$JAVAPATH" ; then
+ PATH=$JAVAPATH":"$PATH
+ export PATH
+fi
+
+if $TEST -e /bin/uname; then
+ os_name=`/bin/uname -s`
+elif $TEST -e /usr/bin/uname; then
+ os_name=`/usr/bin/uname -s`
+else
+ os_name=unknown
+fi
+
+if $TEST -z "$LD_LIBRARY_PATH" ; then
+ LD_LIBRARY_PATH=""
+fi
+
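+# Make the copied native libraries visible to the dynamic loader
+# (DYLD_LIBRARY_PATH on Darwin, LD_LIBRARY_PATH elsewhere)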
+case $os_name in
+ Darwin)
+ DYLD_LIBRARY_PATH=$BLDLIBDIR:$DYLD_LIBRARY_PATH
+ export DYLD_LIBRARY_PATH
+ LD_LIBRARY_PATH=$DYLD_LIBRARY_PATH
+ ;;
+ *)
+ LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
+ ;;
+esac
+
+export LD_LIBRARY_PATH
+
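+# Echo the java command line for the log, then run the JUnit suite capturing stdout and stderr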
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestAll"
+($JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestAll 1>$actual_ext 2>$actual_err)
+
+# Mask out file names, line numbers, versions and thread IDs because they may differ
+sed -e 's/thread [0-9]*/thread (IDs)/' -e 's/: .*\.c /: (file name) /' \
+ -e 's/line [0-9]*/line (number)/' \
+ -e 's/Time: [0-9]*\.[0-9]*/Time: XXXX/' \
+ -e 's/v[1-9]*\.[0-9]*\./version (number)\./' \
+ -e 's/[1-9]*\.[0-9]*\.[0-9]*[^)]*/version (number)/' \
+ $actual_ext > $actual
+
+if $CMP $expect $actual; then
+ echo " PASSED"
+else
+ echo "*FAILED*"
+ echo " Expected result differs from actual result"
+ nerrors="`expr $nerrors + 1`"
+ test yes = "$verbose" && $DIFF $expect $actual |sed 's/^/ /'
+fi
+
+
+# Clean up temporary files/directories
+CLEAN_LIBFILES_AND_BLDLIBDIR
+CLEAN_DATAFILES_AND_BLDDIR
+
+# Report test results and exit
+if test $nerrors -eq 0 ; then
+ echo "All $TESTNAME tests passed."
+ exit $EXIT_SUCCESS
+else
+ echo "$TESTNAME tests failed with $nerrors errors."
+ exit $EXIT_FAILURE
+fi