From 4c66bf627ec4b963f026bbfa7101127271644f79 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Tue, 9 Aug 2022 18:11:14 -0500 Subject: Merge UG from 1.10 and add prelim vol section (#1980) * Merge UG from 1.10 and add prelim vol section * Spelling fixes * Merge format and autotools javadoc from 1.10 --- config/cmake/UseJava.cmake | 1 + doxygen/CMakeLists.txt | 3 +- doxygen/Doxyfile.in | 5 +- doxygen/dox/DDLBNF110.dox | 2 +- doxygen/dox/Overview.dox | 6 +- doxygen/dox/RFC.dox | 2 +- doxygen/dox/ReferenceManual.dox | 61 +- doxygen/dox/Specifications.dox | 16 +- doxygen/dox/TechnicalNotes.dox | 16 +- doxygen/dox/UsersGuide.dox | 400 ++ doxygen/dox/high_level/extension.dox | 544 ++- doxygen/dox/high_level/high_level.dox | 29 - doxygen/examples/H5.format.1.0.html | 2 +- doxygen/examples/H5.format.1.1.html | 2 +- doxygen/examples/H5.format.2.0.html | 6 +- doxygen/examples/H5.format.html | 6 +- doxygen/examples/H5R_examples.c | 171 - doxygen/examples/ThreadSafeLibrary.html | 10 +- doxygen/examples/core_menu.md | 65 + doxygen/examples/fortran_menu.md | 61 + doxygen/examples/high_level_menu.md | 30 + doxygen/examples/java_menu.md | 84 + doxygen/hdf5doxy.css | 16 +- doxygen/hdf5doxy_layout.xml | 1 + doxygen/img/Dmodel_fig1.gif | Bin 0 -> 13259 bytes doxygen/img/Dmodel_fig10.gif | Bin 0 -> 11552 bytes doxygen/img/Dmodel_fig11_b.gif | Bin 0 -> 13924 bytes doxygen/img/Dmodel_fig12_a.gif | Bin 0 -> 3182 bytes doxygen/img/Dmodel_fig12_b.gif | Bin 0 -> 4028 bytes doxygen/img/Dmodel_fig14_a.gif | Bin 0 -> 5367 bytes doxygen/img/Dmodel_fig14_b.gif | Bin 0 -> 6432 bytes doxygen/img/Dmodel_fig14_c.gif | Bin 0 -> 7397 bytes doxygen/img/Dmodel_fig14_d.gif | Bin 0 -> 9898 bytes doxygen/img/Dmodel_fig2.gif | Bin 0 -> 12024 bytes doxygen/img/Dmodel_fig3_a.gif | Bin 0 -> 7427 bytes doxygen/img/Dmodel_fig3_c.gif | Bin 0 -> 6800 bytes doxygen/img/Dmodel_fig4_a.gif | Bin 0 -> 4239 bytes doxygen/img/Dmodel_fig4_b.gif | Bin 0 -> 24587 bytes doxygen/img/Dmodel_fig5.gif | Bin 0 -> 7431 bytes doxygen/img/Dmodel_fig6.gif | Bin 0 -> 6497 bytes doxygen/img/Dmodel_fig7_b.gif | Bin 0 -> 26637 bytes doxygen/img/Dmodel_fig8.gif | Bin 0 -> 12217 bytes doxygen/img/Dmodel_fig9.gif | Bin 0 -> 14812 bytes doxygen/img/Dsets_NbitFloating1.gif | Bin 0 -> 6815 bytes doxygen/img/Dsets_NbitFloating2.gif | Bin 0 -> 9335 bytes doxygen/img/Dsets_NbitInteger1.gif | Bin 0 -> 6489 bytes doxygen/img/Dsets_NbitInteger2.gif | Bin 0 -> 10471 bytes doxygen/img/Dsets_fig1.gif | Bin 0 -> 10803 bytes doxygen/img/Dsets_fig10.gif | Bin 0 -> 6538 bytes doxygen/img/Dsets_fig11.gif | Bin 0 -> 21211 bytes doxygen/img/Dsets_fig12.gif | Bin 0 -> 36869 bytes doxygen/img/Dsets_fig2.gif | Bin 0 -> 18083 bytes doxygen/img/Dsets_fig3.gif | Bin 0 -> 27621 bytes doxygen/img/Dsets_fig4.gif | Bin 0 -> 41416 bytes doxygen/img/Dsets_fig5.gif | Bin 0 -> 15893 bytes doxygen/img/Dsets_fig6.gif | Bin 0 -> 2509 bytes doxygen/img/Dsets_fig7.gif | Bin 0 -> 2556 bytes doxygen/img/Dsets_fig8.gif | Bin 0 -> 2273 bytes doxygen/img/Dsets_fig9.gif | Bin 0 -> 4368 bytes doxygen/img/Dspace_CvsF1.gif | Bin 0 -> 8623 bytes doxygen/img/Dspace_CvsF2.gif | Bin 0 -> 8623 bytes doxygen/img/Dspace_CvsF3.gif | Bin 0 -> 8909 bytes doxygen/img/Dspace_CvsF4.gif | Bin 0 -> 8470 bytes doxygen/img/Dspace_combine.gif | Bin 0 -> 5101 bytes doxygen/img/Dspace_complex.gif | Bin 0 -> 6715 bytes doxygen/img/Dspace_features.gif | Bin 0 -> 89809 bytes doxygen/img/Dspace_features_cmpd.gif | Bin 0 -> 31274 bytes doxygen/img/Dspace_move.gif | Bin 0 -> 13255 bytes 
doxygen/img/Dspace_point.gif | Bin 0 -> 6697 bytes doxygen/img/Dspace_read.gif | Bin 0 -> 14238 bytes doxygen/img/Dspace_select.gif | Bin 0 -> 29452 bytes doxygen/img/Dspace_separate.gif | Bin 0 -> 9911 bytes doxygen/img/Dspace_simple.gif | Bin 0 -> 9709 bytes doxygen/img/Dspace_subset.gif | Bin 0 -> 4790 bytes doxygen/img/Dspace_three_datasets.gif | Bin 0 -> 10912 bytes doxygen/img/Dspace_transfer.gif | Bin 0 -> 15096 bytes doxygen/img/Dspace_write1to2.gif | Bin 0 -> 38748 bytes doxygen/img/Dtypes_fig1.gif | Bin 0 -> 8857 bytes doxygen/img/Dtypes_fig10.gif | Bin 0 -> 41804 bytes doxygen/img/Dtypes_fig11.gif | Bin 0 -> 79789 bytes doxygen/img/Dtypes_fig12.gif | Bin 0 -> 15692 bytes doxygen/img/Dtypes_fig13a.gif | Bin 0 -> 3350 bytes doxygen/img/Dtypes_fig13b.gif | Bin 0 -> 3571 bytes doxygen/img/Dtypes_fig13c.gif | Bin 0 -> 2629 bytes doxygen/img/Dtypes_fig13d.gif | Bin 0 -> 2769 bytes doxygen/img/Dtypes_fig14.gif | Bin 0 -> 50174 bytes doxygen/img/Dtypes_fig15.gif | Bin 0 -> 30871 bytes doxygen/img/Dtypes_fig16.gif | Bin 0 -> 5420 bytes doxygen/img/Dtypes_fig16a.gif | Bin 0 -> 10896 bytes doxygen/img/Dtypes_fig16b.gif | Bin 0 -> 6908 bytes doxygen/img/Dtypes_fig16c.gif | Bin 0 -> 6847 bytes doxygen/img/Dtypes_fig16d.gif | Bin 0 -> 9850 bytes doxygen/img/Dtypes_fig17a.gif | Bin 0 -> 13623 bytes doxygen/img/Dtypes_fig17b.gif | Bin 0 -> 30154 bytes doxygen/img/Dtypes_fig18.gif | Bin 0 -> 9037 bytes doxygen/img/Dtypes_fig19.gif | Bin 0 -> 12014 bytes doxygen/img/Dtypes_fig2.gif | Bin 0 -> 6099 bytes doxygen/img/Dtypes_fig20a.gif | Bin 0 -> 31836 bytes doxygen/img/Dtypes_fig20b.gif | Bin 0 -> 17044 bytes doxygen/img/Dtypes_fig20c.gif | Bin 0 -> 24983 bytes doxygen/img/Dtypes_fig20d.gif | Bin 0 -> 14435 bytes doxygen/img/Dtypes_fig21.gif | Bin 0 -> 56286 bytes doxygen/img/Dtypes_fig22.gif | Bin 0 -> 40000 bytes doxygen/img/Dtypes_fig23.gif | Bin 0 -> 61311 bytes doxygen/img/Dtypes_fig24.gif | Bin 0 -> 30529 bytes doxygen/img/Dtypes_fig25a.gif | Bin 0 -> 17268 bytes doxygen/img/Dtypes_fig25c.gif | Bin 0 -> 17238 bytes doxygen/img/Dtypes_fig26.gif | Bin 0 -> 14866 bytes doxygen/img/Dtypes_fig27.gif | Bin 0 -> 57099 bytes doxygen/img/Dtypes_fig28.gif | Bin 0 -> 49961 bytes doxygen/img/Dtypes_fig3.gif | Bin 0 -> 5654 bytes doxygen/img/Dtypes_fig4.gif | Bin 0 -> 14387 bytes doxygen/img/Dtypes_fig5.gif | Bin 0 -> 16959 bytes doxygen/img/Dtypes_fig6.gif | Bin 0 -> 52742 bytes doxygen/img/Dtypes_fig7.gif | Bin 0 -> 55938 bytes doxygen/img/Dtypes_fig8.gif | Bin 0 -> 20671 bytes doxygen/img/Dtypes_fig9.gif | Bin 0 -> 22036 bytes doxygen/img/Files_fig3.gif | Bin 0 -> 44382 bytes doxygen/img/Files_fig4.gif | Bin 0 -> 38862 bytes doxygen/img/Groups_fig1.gif | Bin 0 -> 5404 bytes doxygen/img/Groups_fig10_a.gif | Bin 0 -> 7745 bytes doxygen/img/Groups_fig10_b.gif | Bin 0 -> 6372 bytes doxygen/img/Groups_fig10_c.gif | Bin 0 -> 8308 bytes doxygen/img/Groups_fig10_d.gif | Bin 0 -> 12963 bytes doxygen/img/Groups_fig11_a.gif | Bin 0 -> 7349 bytes doxygen/img/Groups_fig11_b.gif | Bin 0 -> 7912 bytes doxygen/img/Groups_fig11_c.gif | Bin 0 -> 8589 bytes doxygen/img/Groups_fig11_d.gif | Bin 0 -> 9552 bytes doxygen/img/Groups_fig2.gif | Bin 0 -> 6187 bytes doxygen/img/Groups_fig3.gif | Bin 0 -> 5045 bytes doxygen/img/Groups_fig4.gif | Bin 0 -> 12187 bytes doxygen/img/Groups_fig5.gif | Bin 0 -> 10459 bytes doxygen/img/Groups_fig6.gif | Bin 0 -> 13979 bytes doxygen/img/Groups_fig9_a.gif | Bin 0 -> 6313 bytes doxygen/img/Groups_fig9_aa.gif | Bin 0 -> 7923 bytes doxygen/img/Groups_fig9_b.gif | Bin 0 -> 7352 bytes 
doxygen/img/Groups_fig9_bb.gif | Bin 0 -> 7336 bytes doxygen/img/Pmodel_fig2.gif | Bin 0 -> 4411 bytes doxygen/img/Pmodel_fig3.gif | Bin 0 -> 39263 bytes doxygen/img/Pmodel_fig5_a.gif | Bin 0 -> 17234 bytes doxygen/img/Pmodel_fig5_b.gif | Bin 0 -> 20671 bytes doxygen/img/Pmodel_fig5_c.gif | Bin 0 -> 23897 bytes doxygen/img/Pmodel_fig5_d.gif | Bin 0 -> 23575 bytes doxygen/img/Pmodel_fig5_e.gif | Bin 0 -> 4304 bytes doxygen/img/Pmodel_fig6.gif | Bin 0 -> 11996 bytes doxygen/img/PropListClassInheritance.gif | Bin 0 -> 17349 bytes doxygen/img/PropListEcosystem.gif | Bin 0 -> 3720 bytes doxygen/img/Shared_Attribute.jpg | Bin 0 -> 37209 bytes doxygen/img/UML_Attribute.jpg | Bin 0 -> 36134 bytes doxygen/img/UML_FileAndProps.gif | Bin 0 -> 6161 bytes doxygen/img/VFL_Drivers.gif | Bin 0 -> 17638 bytes doxygen/img/dtypes_fig25b.gif | Bin 0 -> 6634 bytes hl/src/H5LDpublic.h | 6 +- hl/src/H5LTpublic.h | 117 +- java/examples/groups/H5Ex_G_Visit.java | 2 +- java/src/Makefile.am | 3 +- java/src/hdf/hdf5lib/CMakeLists.txt | 1 - java/src/hdf/hdf5lib/H5.java | 3491 ++++++++++++------ java/src/hdf/hdf5lib/HDF5Constants.java | 11 +- java/src/hdf/hdf5lib/HDF5GroupInfo.java | 182 - java/src/hdf/hdf5lib/HDFArray.java | 7 +- java/src/hdf/hdf5lib/HDFNativeData.java | 7 +- java/src/hdf/hdf5lib/callbacks/Callbacks.java | 6 +- java/src/hdf/hdf5lib/callbacks/H5A_iterate_cb.java | 2 + java/src/hdf/hdf5lib/callbacks/H5D_append_cb.java | 2 + java/src/hdf/hdf5lib/callbacks/H5D_iterate_cb.java | 2 + java/src/hdf/hdf5lib/callbacks/H5E_walk_cb.java | 2 + java/src/hdf/hdf5lib/callbacks/H5L_iterate_t.java | 2 + java/src/hdf/hdf5lib/callbacks/H5O_iterate_t.java | 2 + .../hdf5lib/callbacks/H5P_cls_close_func_cb.java | 2 + .../hdf5lib/callbacks/H5P_cls_copy_func_cb.java | 2 + .../hdf5lib/callbacks/H5P_cls_create_func_cb.java | 2 + java/src/hdf/hdf5lib/callbacks/H5P_iterate_cb.java | 2 + .../hdf5lib/callbacks/H5P_prp_close_func_cb.java | 2 + .../hdf5lib/callbacks/H5P_prp_compare_func_cb.java | 2 + .../hdf5lib/callbacks/H5P_prp_copy_func_cb.java | 2 + .../hdf5lib/callbacks/H5P_prp_create_func_cb.java | 2 + .../hdf5lib/callbacks/H5P_prp_delete_func_cb.java | 2 + .../hdf/hdf5lib/callbacks/H5P_prp_get_func_cb.java | 2 + .../hdf/hdf5lib/callbacks/H5P_prp_set_func_cb.java | 2 + java/src/hdf/hdf5lib/callbacks/package-info.java | 1 + .../hdf/hdf5lib/exceptions/HDF5AtomException.java | 6 +- .../hdf5lib/exceptions/HDF5AttributeException.java | 6 +- .../hdf/hdf5lib/exceptions/HDF5BtreeException.java | 6 +- .../exceptions/HDF5DataFiltersException.java | 6 +- .../exceptions/HDF5DataStorageException.java | 6 +- .../exceptions/HDF5DatasetInterfaceException.java | 6 +- .../HDF5DataspaceInterfaceException.java | 6 +- .../exceptions/HDF5DatatypeInterfaceException.java | 6 +- java/src/hdf/hdf5lib/exceptions/HDF5Exception.java | 18 +- .../exceptions/HDF5ExternalFileListException.java | 6 +- .../exceptions/HDF5FileInterfaceException.java | 6 +- .../exceptions/HDF5FunctionArgumentException.java | 6 +- .../exceptions/HDF5FunctionEntryExitException.java | 6 +- .../hdf/hdf5lib/exceptions/HDF5HeapException.java | 6 +- .../exceptions/HDF5InternalErrorException.java | 6 +- .../hdf/hdf5lib/exceptions/HDF5JavaException.java | 8 +- .../hdf5lib/exceptions/HDF5LibraryException.java | 44 +- .../exceptions/HDF5LowLevelIOException.java | 6 +- .../exceptions/HDF5MetaDataCacheException.java | 6 +- .../exceptions/HDF5ObjectHeaderException.java | 6 +- .../HDF5PropertyListInterfaceException.java | 6 +- .../hdf5lib/exceptions/HDF5ReferenceException.java | 6 +- 
.../HDF5ResourceUnavailableException.java | 6 +- .../exceptions/HDF5SymbolTableException.java | 6 +- java/src/hdf/hdf5lib/exceptions/package-info.java | 1 + java/src/hdf/hdf5lib/package-info.java | 114 +- java/src/hdf/overview.html | 8 +- java/src/jni/exceptionImp.c | 12 +- java/src/jni/exceptionImp.h | 2 +- release_docs/RELEASE.txt | 6 +- src/H5Amodule.h | 346 +- src/H5Dmodule.h | 2956 ++++++++++++++- src/H5Emodule.h | 578 ++- src/H5Epublic.h | 8 +- src/H5FDmpio.h | 6 +- src/H5FDsec2.h | 16 +- src/H5FDstdio.h | 2 +- src/H5Fmodule.h | 1443 +++++++- src/H5Gmodule.h | 924 ++++- src/H5Gpublic.h | 10 +- src/H5Imodule.h | 3 +- src/H5Lmodule.h | 3 +- src/H5Mmodule.h | 16 +- src/H5Omodule.h | 3 +- src/H5Opublic.h | 4 +- src/H5PLmodule.h | 5 +- src/H5Pmodule.h | 960 ++++- src/H5Ppublic.h | 80 +- src/H5Rmodule.h | 23 +- src/H5Smodule.h | 1492 +++++++- src/H5Tmodule.h | 3832 +++++++++++++++++++- src/H5VLmodule.h | 92 +- src/H5VLpublic.h | 2 +- src/H5Zmodule.h | 3 +- src/H5Zpublic.h | 2 +- src/H5module.h | 1404 ++++++- src/H5private.h | 2 +- 238 files changed, 17725 insertions(+), 2236 deletions(-) create mode 100644 doxygen/dox/UsersGuide.dox delete mode 100644 doxygen/dox/high_level/high_level.dox delete mode 100644 doxygen/examples/H5R_examples.c create mode 100644 doxygen/examples/core_menu.md create mode 100644 doxygen/examples/fortran_menu.md create mode 100644 doxygen/examples/high_level_menu.md create mode 100644 doxygen/examples/java_menu.md create mode 100644 doxygen/img/Dmodel_fig1.gif create mode 100644 doxygen/img/Dmodel_fig10.gif create mode 100644 doxygen/img/Dmodel_fig11_b.gif create mode 100644 doxygen/img/Dmodel_fig12_a.gif create mode 100644 doxygen/img/Dmodel_fig12_b.gif create mode 100644 doxygen/img/Dmodel_fig14_a.gif create mode 100644 doxygen/img/Dmodel_fig14_b.gif create mode 100644 doxygen/img/Dmodel_fig14_c.gif create mode 100644 doxygen/img/Dmodel_fig14_d.gif create mode 100644 doxygen/img/Dmodel_fig2.gif create mode 100644 doxygen/img/Dmodel_fig3_a.gif create mode 100644 doxygen/img/Dmodel_fig3_c.gif create mode 100644 doxygen/img/Dmodel_fig4_a.gif create mode 100644 doxygen/img/Dmodel_fig4_b.gif create mode 100644 doxygen/img/Dmodel_fig5.gif create mode 100644 doxygen/img/Dmodel_fig6.gif create mode 100644 doxygen/img/Dmodel_fig7_b.gif create mode 100644 doxygen/img/Dmodel_fig8.gif create mode 100644 doxygen/img/Dmodel_fig9.gif create mode 100644 doxygen/img/Dsets_NbitFloating1.gif create mode 100644 doxygen/img/Dsets_NbitFloating2.gif create mode 100644 doxygen/img/Dsets_NbitInteger1.gif create mode 100644 doxygen/img/Dsets_NbitInteger2.gif create mode 100644 doxygen/img/Dsets_fig1.gif create mode 100644 doxygen/img/Dsets_fig10.gif create mode 100644 doxygen/img/Dsets_fig11.gif create mode 100644 doxygen/img/Dsets_fig12.gif create mode 100644 doxygen/img/Dsets_fig2.gif create mode 100644 doxygen/img/Dsets_fig3.gif create mode 100644 doxygen/img/Dsets_fig4.gif create mode 100644 doxygen/img/Dsets_fig5.gif create mode 100644 doxygen/img/Dsets_fig6.gif create mode 100644 doxygen/img/Dsets_fig7.gif create mode 100644 doxygen/img/Dsets_fig8.gif create mode 100644 doxygen/img/Dsets_fig9.gif create mode 100644 doxygen/img/Dspace_CvsF1.gif create mode 100644 doxygen/img/Dspace_CvsF2.gif create mode 100644 doxygen/img/Dspace_CvsF3.gif create mode 100644 doxygen/img/Dspace_CvsF4.gif create mode 100644 doxygen/img/Dspace_combine.gif create mode 100644 doxygen/img/Dspace_complex.gif create mode 100644 doxygen/img/Dspace_features.gif create mode 100644 
doxygen/img/Dspace_features_cmpd.gif create mode 100644 doxygen/img/Dspace_move.gif create mode 100644 doxygen/img/Dspace_point.gif create mode 100644 doxygen/img/Dspace_read.gif create mode 100644 doxygen/img/Dspace_select.gif create mode 100644 doxygen/img/Dspace_separate.gif create mode 100644 doxygen/img/Dspace_simple.gif create mode 100644 doxygen/img/Dspace_subset.gif create mode 100644 doxygen/img/Dspace_three_datasets.gif create mode 100644 doxygen/img/Dspace_transfer.gif create mode 100644 doxygen/img/Dspace_write1to2.gif create mode 100644 doxygen/img/Dtypes_fig1.gif create mode 100644 doxygen/img/Dtypes_fig10.gif create mode 100644 doxygen/img/Dtypes_fig11.gif create mode 100644 doxygen/img/Dtypes_fig12.gif create mode 100644 doxygen/img/Dtypes_fig13a.gif create mode 100644 doxygen/img/Dtypes_fig13b.gif create mode 100644 doxygen/img/Dtypes_fig13c.gif create mode 100644 doxygen/img/Dtypes_fig13d.gif create mode 100644 doxygen/img/Dtypes_fig14.gif create mode 100644 doxygen/img/Dtypes_fig15.gif create mode 100644 doxygen/img/Dtypes_fig16.gif create mode 100644 doxygen/img/Dtypes_fig16a.gif create mode 100644 doxygen/img/Dtypes_fig16b.gif create mode 100644 doxygen/img/Dtypes_fig16c.gif create mode 100644 doxygen/img/Dtypes_fig16d.gif create mode 100644 doxygen/img/Dtypes_fig17a.gif create mode 100644 doxygen/img/Dtypes_fig17b.gif create mode 100644 doxygen/img/Dtypes_fig18.gif create mode 100644 doxygen/img/Dtypes_fig19.gif create mode 100644 doxygen/img/Dtypes_fig2.gif create mode 100644 doxygen/img/Dtypes_fig20a.gif create mode 100644 doxygen/img/Dtypes_fig20b.gif create mode 100644 doxygen/img/Dtypes_fig20c.gif create mode 100644 doxygen/img/Dtypes_fig20d.gif create mode 100644 doxygen/img/Dtypes_fig21.gif create mode 100644 doxygen/img/Dtypes_fig22.gif create mode 100644 doxygen/img/Dtypes_fig23.gif create mode 100644 doxygen/img/Dtypes_fig24.gif create mode 100644 doxygen/img/Dtypes_fig25a.gif create mode 100644 doxygen/img/Dtypes_fig25c.gif create mode 100644 doxygen/img/Dtypes_fig26.gif create mode 100644 doxygen/img/Dtypes_fig27.gif create mode 100644 doxygen/img/Dtypes_fig28.gif create mode 100644 doxygen/img/Dtypes_fig3.gif create mode 100644 doxygen/img/Dtypes_fig4.gif create mode 100644 doxygen/img/Dtypes_fig5.gif create mode 100644 doxygen/img/Dtypes_fig6.gif create mode 100644 doxygen/img/Dtypes_fig7.gif create mode 100644 doxygen/img/Dtypes_fig8.gif create mode 100644 doxygen/img/Dtypes_fig9.gif create mode 100644 doxygen/img/Files_fig3.gif create mode 100644 doxygen/img/Files_fig4.gif create mode 100644 doxygen/img/Groups_fig1.gif create mode 100644 doxygen/img/Groups_fig10_a.gif create mode 100644 doxygen/img/Groups_fig10_b.gif create mode 100644 doxygen/img/Groups_fig10_c.gif create mode 100644 doxygen/img/Groups_fig10_d.gif create mode 100644 doxygen/img/Groups_fig11_a.gif create mode 100644 doxygen/img/Groups_fig11_b.gif create mode 100644 doxygen/img/Groups_fig11_c.gif create mode 100644 doxygen/img/Groups_fig11_d.gif create mode 100644 doxygen/img/Groups_fig2.gif create mode 100644 doxygen/img/Groups_fig3.gif create mode 100644 doxygen/img/Groups_fig4.gif create mode 100644 doxygen/img/Groups_fig5.gif create mode 100644 doxygen/img/Groups_fig6.gif create mode 100644 doxygen/img/Groups_fig9_a.gif create mode 100644 doxygen/img/Groups_fig9_aa.gif create mode 100644 doxygen/img/Groups_fig9_b.gif create mode 100644 doxygen/img/Groups_fig9_bb.gif create mode 100644 doxygen/img/Pmodel_fig2.gif create mode 100644 doxygen/img/Pmodel_fig3.gif create mode 100644 
doxygen/img/Pmodel_fig5_a.gif create mode 100644 doxygen/img/Pmodel_fig5_b.gif create mode 100644 doxygen/img/Pmodel_fig5_c.gif create mode 100644 doxygen/img/Pmodel_fig5_d.gif create mode 100644 doxygen/img/Pmodel_fig5_e.gif create mode 100644 doxygen/img/Pmodel_fig6.gif create mode 100644 doxygen/img/PropListClassInheritance.gif create mode 100644 doxygen/img/PropListEcosystem.gif create mode 100644 doxygen/img/Shared_Attribute.jpg create mode 100644 doxygen/img/UML_Attribute.jpg create mode 100644 doxygen/img/UML_FileAndProps.gif create mode 100644 doxygen/img/VFL_Drivers.gif create mode 100644 doxygen/img/dtypes_fig25b.gif delete mode 100644 java/src/hdf/hdf5lib/HDF5GroupInfo.java diff --git a/config/cmake/UseJava.cmake b/config/cmake/UseJava.cmake index 2351ce8..1de08db 100644 --- a/config/cmake/UseJava.cmake +++ b/config/cmake/UseJava.cmake @@ -1448,6 +1448,7 @@ function(create_javadoc _target) add_custom_target(${_target}_javadoc ALL COMMAND ${Java_JAVADOC_EXECUTABLE} + -Xdoclint:none ${_javadoc_options} ${_javadoc_files} ${_javadoc_packages} diff --git a/doxygen/CMakeLists.txt b/doxygen/CMakeLists.txt index e75c899..17d8da7 100644 --- a/doxygen/CMakeLists.txt +++ b/doxygen/CMakeLists.txt @@ -7,11 +7,12 @@ project (HDF5_DOXYGEN C) if (DOXYGEN_FOUND) set (DOXYGEN_PACKAGE ${HDF5_PACKAGE_NAME}) set (DOXYGEN_VERSION_STRING ${HDF5_PACKAGE_VERSION_STRING}) + set (DOXYGEN_DIR ${HDF5_DOXYGEN_DIR}) set (DOXYGEN_INCLUDE_ALIASES_PATH ${HDF5_DOXYGEN_DIR}) set (DOXYGEN_INCLUDE_ALIASES aliases) set (DOXYGEN_VERBATIM_VARS DOXYGEN_INCLUDE_ALIASES) set (DOXYGEN_PROJECT_LOGO ${HDF5_DOXYGEN_DIR}/img/HDFG-logo.png) - set (DOXYGEN_PROJECT_BRIEF "C-API Reference") + set (DOXYGEN_PROJECT_BRIEF "API Reference") set (DOXYGEN_INPUT_DIRECTORY "${HDF5_SOURCE_DIR} ${HDF5_DOXYGEN_DIR}/dox ${HDF5_GENERATED_SOURCE_DIR}") set (DOXYGEN_OPTIMIZE_OUTPUT_FOR_C YES) set (DOXYGEN_MACRO_EXPANSION YES) diff --git a/doxygen/Doxyfile.in b/doxygen/Doxyfile.in index 6d82765..6f29c0b 100644 --- a/doxygen/Doxyfile.in +++ b/doxygen/Doxyfile.in @@ -874,6 +874,9 @@ FILE_PATTERNS = H5*public.h \ H5VLnative.h \ H5Zdevelop.h \ H5version.h \ + H5*.java \ + HDF*.java \ + *.F90 \ *.dox # The RECURSIVE tag can be used to specify whether or not subdirectories should @@ -942,7 +945,7 @@ EXAMPLE_RECURSIVE = NO # that contain images that are to be included in the documentation (see the # \image command). -IMAGE_PATH = @HDF5_DOXYGEN_DIR@/img +IMAGE_PATH = @DOXYGEN_DIR@/img # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. Doxygen will invoke the filter program diff --git a/doxygen/dox/DDLBNF110.dox b/doxygen/dox/DDLBNF110.dox index f7e4267..6d6b67e 100644 --- a/doxygen/dox/DDLBNF110.dox +++ b/doxygen/dox/DDLBNF110.dox @@ -126,7 +126,7 @@ This section contains a brief explanation of the symbols used in the DDL. 
::= H5T_REFERENCE { } - ::= H5T_STD_REF_OBJECT | H5T_STD_REF_DSETREG | H5T_STD_REF | UNDEFINED + ::= H5T_STD_REF_OBJECT | H5T_STD_REF_DSETREG ::= H5T_COMPOUND { + diff --git a/doxygen/dox/Overview.dox b/doxygen/dox/Overview.dox index 040769c..64e80c7 100644 --- a/doxygen/dox/Overview.dox +++ b/doxygen/dox/Overview.dox @@ -23,7 +23,7 @@ documents cover a mix of tasks, concepts, and reference, to help a specific \par Versions Version-specific documentation (see the version in the title area) can be found here: - - HDF5 develop branch (this site) + - HDF5 1.12 branch (this site) - HDF5 1.12.x - HDF5 1.10.x - HDF5 1.8.x @@ -36,10 +36,6 @@ documents cover a mix of tasks, concepts, and reference, to help a specific \par Offline reading You can download it as a tgz archive for offline reading. -\par History - A snapshot (~April 2017) of the pre-Doxygen HDF5 documentation can be found - here. - \par ToDo List There is plenty of unfinished business. diff --git a/doxygen/dox/RFC.dox b/doxygen/dox/RFC.dox index c16dcea..3cda309 100644 --- a/doxygen/dox/RFC.dox +++ b/doxygen/dox/RFC.dox @@ -88,4 +88,4 @@ 2004-08-11 \ref_rfc20040811 -*/ \ No newline at end of file +*/ diff --git a/doxygen/dox/ReferenceManual.dox b/doxygen/dox/ReferenceManual.dox index ad10ba1..7900925 100644 --- a/doxygen/dox/ReferenceManual.dox +++ b/doxygen/dox/ReferenceManual.dox @@ -1,53 +1,32 @@ /** \page RM HDF5 Reference Manual -The functions provided by the HDF5 C-API are grouped into the following +The functions provided by the HDF5 API are grouped into the following \Emph{modules}: *
Modules
- - - - + + + + + + + + diff --git a/doxygen/dox/Specifications.dox b/doxygen/dox/Specifications.dox index 5a36d61..e352f40 100644 --- a/doxygen/dox/Specifications.dox +++ b/doxygen/dox/Specifications.dox @@ -2,20 +2,20 @@ \section DDL -\li \ref DDLBNF110 "DDL in BNF through HDF5 1.10" -\li \ref DDLBNF112 "DDL in BNF for HDF5 1.12 and above" +\li \ref DDLBNF110 +\li \ref DDLBNF112 \section File Format -\li \ref FMT1 "HDF5 File Format Specification Version 1.0" -\li \ref FMT11 "HDF5 File Format Specification Version 1.1" -\li \ref FMT2 "HDF5 File Format Specification Version 2.0" -\li \ref FMT3 "HDF5 File Format Specification Version 3.0" +\li \ref FMT1 +\li \ref FMT11 +\li \ref FMT2 +\li \ref FMT3 \section Other -\li \ref IMG "HDF5 Image and Palette Specification Version 1.2" -\li \ref TBL "HDF5 Table Specification Version 1.0" +\li \ref IMG +\li \ref TBL \li HDF5 Dimension Scale Specification diff --git a/doxygen/dox/TechnicalNotes.dox b/doxygen/dox/TechnicalNotes.dox index 9bd2802..bca81e4 100644 --- a/doxygen/dox/TechnicalNotes.dox +++ b/doxygen/dox/TechnicalNotes.dox @@ -1,13 +1,13 @@ /** \page TN Technical Notes -\li \link api-compat-macros API Compatibility Macros \endlink -\li \ref APPDBG "Debugging HDF5 Applications" -\li \ref FMTDISC "File Format Walkthrough" -\li \ref FILTER "Filters" -\li \ref IOFLOW "HDF5 Raw I/O Flow Notes" -\li \ref TNMDC "Metadata Caching in HDF5" -\li \ref MT "Thread Safe library" -\li \ref VFL "Virtual File Layer" +\li \ref api-compat-macros +\li \ref APPDBG +\li \ref FMTDISC +\li \ref FILTER +\li \ref IOFLOW +\li \ref TNMDC +\li \ref MT +\li \ref VFL */ diff --git a/doxygen/dox/UsersGuide.dox b/doxygen/dox/UsersGuide.dox new file mode 100644 index 0000000..6b44d21 --- /dev/null +++ b/doxygen/dox/UsersGuide.dox @@ -0,0 +1,400 @@ +/** \page UG HDF5 User Guide + +
+HDF5 Release 1.12 + +\image html HDFG-logo.png "The HDF Group" + +
+ +\ref sec_data_model +\li \ref subsec_data_model_intro +\li \ref subsec_data_model_abstract +
    +
  • \ref subsubsec_data_model_abstract_file +
  • \ref subsubsec_data_model_abstract_group +
  • \ref subsubsec_data_model_abstract_dataset +
  • \ref subsubsec_data_model_abstract_space +
  • \ref subsubsec_data_model_abstract_type +
  • \ref subsubsec_data_model_abstract_attr +
  • \ref subsubsec_data_model_abstract_plist +
  • \ref subsubsec_data_model_abstract_link +
+\li \ref subsec_data_model_storage +
    +
  • \ref subsubsec_data_model_storage_spec +
  • \ref subsubsec_data_model_storage_imple +
+\li \ref subsec_data_model_structure +
    +
  • \ref subsubsec_data_model_structure_file +
  • \ref subsubsec_data_model_structure_path +
  • \ref subsubsec_data_model_structure_example +
+ +\ref sec_program +\li \ref subsec_program_intro +\li \ref subsec_program_model +
    +
  • \ref subsubsec_program_model_create +
  • \ref subsubsec_program_model_dset +
  • \ref subsubsec_program_model_close +
  • \ref subsubsec_program_model_data +
  • \ref subsubsec_program_model_partial +
  • \ref subsubsec_program_model_info +
  • \ref subsubsec_program_model_compound +
  • \ref subsubsec_program_model_extend +
  • \ref subsubsec_program_model_group +
  • \ref subsubsec_program_model_attr +
+\li \ref subsec_program_transfer_pipeline + +\ref sec_file +\li \ref subsec_file_intro +\li \ref subsec_file_access_modes +\li \ref subsec_file_creation_access +\li \ref subsec_file_drivers +\li \ref subsec_file_program_model +
    +
  • \ref subsubsec_file_program_model_create +
  • \ref subsubsec_file_program_model_open +
  • \ref subsubsec_file_program_model_close +
+\li \ref subsec_file_h5dump +\li \ref subsec_file_summary +\li \ref subsec_file_create +\li \ref subsec_file_closes +\li \ref subsec_file_property_lists +
    +
  • \ref subsubsec_file_property_lists_create +
  • \ref subsubsec_file_property_lists_props +
  • \ref subsubsec_file_property_lists_access +
+\li \ref subsec_file_alternate_drivers +
    +
  • \ref subsubsec_file_alternate_drivers_id +
  • \ref subsubsec_file_alternate_drivers_sec2 +
  • \ref subsubsec_file_alternate_drivers_direct +
  • \ref subsubsec_file_alternate_drivers_log +
  • \ref subsubsec_file_alternate_drivers_win +
  • \ref subsubsec_file_alternate_drivers_stdio +
  • \ref subsubsec_file_alternate_drivers_mem +
  • \ref subsubsec_file_alternate_drivers_family +
  • \ref subsubsec_file_alternate_drivers_multi +
  • \ref subsubsec_file_alternate_drivers_split +
  • \ref subsubsec_file_alternate_drivers_par +
+\li \ref subsec_file_examples +
    +
  • \ref subsubsec_file_examples_trunc +
  • \ref subsubsec_file_examples_props +
  • \ref subsubsec_file_examples_access +
+\li \ref subsec_file_multiple + +\ref sec_group +\li \ref subsec_group_intro +\li \ref subsec_group_descr +
    +
  • \ref subsubsec_group_descr_object +
  • \ref subsubsec_group_descr_model +
  • \ref subsubsec_group_descr_path +
  • \ref subsubsec_group_descr_impl +
+\li \ref subsec_group_h5dump +\li \ref subsec_group_function +\li \ref subsec_group_program +
    +
  • \ref subsubsec_group_program_create +
  • \ref subsubsec_group_program_open +
  • \ref subsubsec_group_program_dataset +
  • \ref subsubsec_group_program_close +
  • \ref subsubsec_group_program_links +
  • \ref subsubsec_group_program_info +
  • \ref subsubsec_group_program_objs +
  • \ref subsubsec_group_program_all +
+\li \ref subsec_group_examples + +\ref sec_dataset +\li \ref subsec_dataset_intro +\li \ref subsec_dataset_function +\li \ref subsec_dataset_program +
    +
  • \ref subsubsec_dataset_program_general +
  • \ref subsubsec_dataset_program_create +
  • \ref subsubsec_dataset_program_transfer +
  • \ref subsubsec_dataset_program_read +
+\li \ref subsec_dataset_transfer Data Transfer +
    +
  • \ref subsubsec_dataset_transfer_pipe +
  • \ref subsubsec_dataset_transfer_filter +
  • \ref subsubsec_dataset_transfer_drive +
  • \ref subsubsec_dataset_transfer_props +
  • \ref subsubsec_dataset_transfer_store +
  • \ref subsubsec_dataset_transfer_partial +
+\li \ref subsec_dataset_allocation +
    +
  • \ref subsubsec_dataset_allocation_store +
  • \ref subsubsec_dataset_allocation_delete +
  • \ref subsubsec_dataset_allocation_release +
  • \ref subsubsec_dataset_allocation_ext +
+\li \ref subsec_dataset_filters +
    +
  • \ref subsubsec_dataset_filters_nbit +
  • \ref subsubsec_dataset_filters_scale +
  • \ref subsubsec_dataset_filters_szip +
+ +\ref sec_datatype +\li \ref subsec_datatype_intro +\li \ref subsec_datatype_model +
    +
  • \ref subsubsec_datatype_model_class +
  • \ref subsubsec_datatype_model_predefine +
+\li \ref subsec_datatype_usage +
    +
  • \ref subsubsec_datatype_usage_object +
  • \ref subsubsec_datatype_usage_create +
  • \ref subsubsec_datatype_usage_transfer +
  • \ref subsubsec_datatype_usage_discover +
  • \ref subsubsec_datatype_usage_user +
+\li \ref subsec_datatype_function +\li \ref subsec_datatype_program +
    +
  • \ref subsubsec_datatype_program_discover +
  • \ref subsubsec_datatype_program_define +
+\li \ref subsec_datatype_other +
    +
  • \ref subsubsec_datatype_other_strings +
  • \ref subsubsec_datatype_other_refs +
  • \ref subsubsec_datatype_other_enum +
  • \ref subsubsec_datatype_other_opaque +
  • \ref subsubsec_datatype_other_bitfield +
+\li \ref subsec_datatype_fill +\li \ref subsec_datatype_complex +
    +
  • \ref subsubsec_datatype_complex_create +
  • \ref subsubsec_datatype_complex_analyze +
+\li \ref subsec_datatype_life +\li \ref subsec_datatype_transfer +\li \ref subsec_datatype_text + +\ref sec_dataspace +\li \ref subsec_dataspace_intro +\li \ref subsec_dataspace_function +\li \ref subsec_dataspace_program +
    +
  • \ref subsubsec_dataspace_program_object +
  • \ref subsubsec_dataspace_program_model +
+\li \ref subsec_dataspace_transfer +
    +
  • \ref subsubsec_dataspace_transfer_select +
  • \ref subsubsec_dataspace_transfer_model +
+\li \ref subsec_dataspace_select +\li \ref subsec_dataspace_refer +
    +
  • \ref subsubsec_dataspace_refer_use +
  • \ref subsubsec_dataspace_refer_create +
  • \ref subsubsec_dataspace_refer_read +
+\li \ref subsec_dataspace_sample + +\ref sec_attribute +\li \ref subsec_attribute_intro +\li \ref subsec_attribute_program +
    +
  • To Open and Read or Write an Existing Attribute
  • +
+\li \ref subsec_error_H5A +\li \ref subsec_attribute_work +
    +
  • \ref subsubsec_attribute_work_struct +
  • \ref subsubsec_attribute_work_create +
  • \ref subsubsec_attribute_work_access +
  • \ref subsubsec_attribute_work_info +
  • \ref subsubsec_attribute_work_iterate +
  • \ref subsubsec_attribute_work_delete +
  • \ref subsubsec_attribute_work_close +
+\li \ref subsec_attribute_special + +\ref sec_error +\li \ref subsec_error_intro +\li \ref subsec_error_program +\li \ref subsec_error_H5E +\li \ref subsec_error_ops +
    +
  • \ref subsubsec_error_ops_stack +
  • \ref subsubsec_error_ops_print +
  • \ref subsubsec_error_ops_mute +
  • \ref subsubsec_error_ops_custom_print +
  • \ref subsubsec_error_ops_walk +
  • \ref subsubsec_error_ops_travers +
+\li \ref subsec_error_adv +
    +
  • \ref subsubsec_error_adv_more +
  • \ref subsubsec_error_adv_app +
+ +\ref sec_plist +\li \ref subsec_plist_intro +\li \ref subsec_plist_class +
    +
  • \ref subsubsec_plist_class +
  • \ref subsubsec_plist_lists +
  • \ref subsubsec_plist_props +
+\li \ref subsec_plist_program +
    +
  • \ref subsubsec_plist_default +
  • \ref subsubsec_plist_basic +
  • \ref subsubsec_plist_additional +
+\li \ref subsec_plist_generic +\li \ref subsec_plist_H5P +\li \ref subsec_plist_resources +\li \ref subsec_plist_notes + +\ref sec_vol +\li \ref subsec_vol_intro +\li \ref subsec_vol_abstract_layer +\li \ref subsec_vol_connect +\li \ref subsec_vol_use + +\ref sec_map + +\ref sec_addition + +\page AR_UG Additional Resources + +\section sec_addition Additional Resources +These documents provide additional information for the use and tuning of specific HDF5 features. +
-\li \ref H5A "Attributes (H5A)" -\li \ref H5D "Datasets (H5D)" -\li \ref H5S "Dataspaces (H5S)" -\li \ref H5T "Datatypes (H5T)" -\li \ref H5E "Error Handling (H5E)" -\li \ref H5ES "Event Sets (H5ES)" -\li \ref H5F "Files (H5F)" -\li \ref H5Z "Filters (H5Z)" -\li \ref H5G "Groups (H5G)" - -\li \ref H5I "Identifiers (H5I)" -\li \ref H5 "Library General (H5)" -\li \ref H5L "Links (H5L)" -\li \ref H5M "Maps (H5M)" -\li \ref H5O "Objects (H5O)" -\li \ref H5P "Property Lists (H5P)" -\li \ref H5PL "Dynamically-loaded Plugins (H5PL)" -\li \ref H5R "References (H5R)" -\li \ref H5VL "Virtual Object Layer (H5VL)" - -\li \ref high_level -
    -
  • \ref H5LT "Lite (H5LT, H5LD)" -
  • \ref H5IM "Images (H5IM)" -
  • \ref H5TB "Table (H5TB)" -
  • \ref H5PT "Packet Table (H5PT)" -
  • \ref H5DS "Dimension Scale (H5DS)" -
  • \ref H5DO "Optimizations (H5DO)" -
  • \ref H5LR "Extensions (H5LR, H5LT)" -
-
-\a Core \a library: \ref H5 \ref H5A \ref H5D \ref H5E \ref H5ES \ref H5F \ref H5G \ref H5I \ref H5L -\ref H5M \ref H5O \ref H5P \ref H5PL \ref H5R \ref H5S \ref H5T \ref H5VL \ref H5Z -
-\a High-level \a library: \ref H5LT \ref H5IM \ref H5TB \ref H5PT \ref H5DS \ref H5DO \ref H5LR -
+\include{doc} core_menu.md +
+ +\include{doc} high_level_menu.md +
+ +\include{doc} fortran_menu.md +
+ +\include{doc} java_menu.md +
Deprecated functions Functions with \ref ASYNC
+<table>
+<caption>Table of Additional resources</caption>
+<tr><th>Document</th><th>Comments</th></tr>
+<tr><td>HDF5 Examples</td><td>Code examples by API.</td></tr>
+<tr><td>Chunking in HDF5</td><td>Structuring the use of chunking and tuning it for performance.</td></tr>
+<tr><td>Using the Direct Chunk Write Function</td><td>Describes another way that chunks can be written to datasets.</td></tr>
+<tr><td>Copying Committed Datatypes with H5Ocopy</td><td>Describes how to copy to another file a dataset that uses a committed datatype or an object with an attribute that uses a committed datatype so that the committed datatype in the destination file can be used by multiple objects.</td></tr>
+<tr><td>Metadata Caching in HDF5</td><td>Managing the HDF5 metadata cache and tuning it for performance.</td></tr>
+<tr><td>HDF5 Dynamically Loaded Filters</td><td>Describes how an HDF5 application can apply a filter that is not registered with the HDF5 Library.</td></tr>
+<tr><td>HDF5 File Image Operations</td><td>Describes how to work with HDF5 files in memory. Disk I/O is not required when file images are opened, created, read from, or written to.</td></tr>
+<tr><td>Modified Region Writes</td><td>Describes how to set write operations for in-memory files so that only modified regions are written to storage. Available when the Core (Memory) VFD is used.</td></tr>
+<tr><td>Using Identifiers</td><td>Describes how identifiers behave and how they should be treated.</td></tr>
+<tr><td>Using UTF-8 Encoding in HDF5 Applications</td><td>Describes the use of UTF-8 Unicode character encodings in HDF5 applications.</td></tr>
+<tr><td>Freeing Memory Allocated by the HDF5 Library</td><td>Describes how inconsistent memory management can cause heap corruption or resource leaks and possible solutions.</td></tr>
+<tr><td>HDF5 Glossary</td><td>A glossary of terms.</td></tr>
+</table>
+ +Previous Chapter \ref sec_plist + +\par Don't like what you see? - You can help to improve this User Guide + Complete the survey linked near the top of this page!\n + We treat documentation like code: Fork the + HDF5 repo, make changes, and create a + pull request !\n + +*/ \ No newline at end of file diff --git a/doxygen/dox/high_level/extension.dox b/doxygen/dox/high_level/extension.dox index c81ac6e..e8471b9 100644 --- a/doxygen/dox/high_level/extension.dox +++ b/doxygen/dox/high_level/extension.dox @@ -1,60 +1,48 @@ /** \defgroup H5LR Extensions * - * Working with region references, hyperslab selections, + * Working with region references, hyperslab selections, * and bit-fields (H5LR, H5LT) * - * The following reference manual entries describe high-level HDF5 C and Fortran APIs - * for working with region references, hyperslab selections, and bit-fields. - * These functions were created as part of a project supporting + * The following reference manual entries describe high-level HDF5 C and Fortran APIs + * for working with region references, hyperslab selections, and bit-fields. + * These functions were created as part of a project supporting * NPP/NPOESS Data Production and Exploitation ( * - * project , - * - * software ). - * While they were written to facilitate access to NPP, NPOESS, and JPSS - * data in the HDF5 format, these functions may be useful to anyone working + * project, software). + * While they were written to facilitate access to NPP, NPOESS, and JPSS + * data in the HDF5 format, these functions may be useful to anyone working * with region references, hyperslab selections, or bit-fields. * - * Note that these functions are not part of the standard HDF5 distribution; - * the - * - * software - * must be separately downloaded and installed. + * Note that these functions are not part of the standard HDF5 distribution; + * the software must be separately downloaded and installed. * - * A comprehensive guide to this library, - * + * A comprehensive guide to this library, + * * User Guide to the HDF5 High-level Library for Handling Region References and Hyperslab Selections - * is available at + * is available at * https://support.hdfgroup.org/projects/jpss/documentation/HL/UG/NPOESS_HL-UG.pdf. * * - \ref H5LRcopy_reference - * \n Copies data from the specified dataset to a new location and - * creates a reference to it. + * \n Copies data from the specified dataset to a new location and creates a reference to it. * - \ref H5LRcopy_region - * \n Copies data from a referenced region to a region in a - * destination dataset. + * \n Copies data from a referenced region to a region in a destination dataset. * - \ref H5LRcreate_ref_to_all - * \n Creates a dataset with the region references to the data in all - * datasets located under a specified group in a file or creates a - * dataset with object references to all objects (groups or datasets) + * \n Creates a dataset with the region references to the data in all datasets located under a + * specified group in a file or creates a dataset with object references to all objects (groups or datasets) * located under a specified group in a file. * - \ref H5LRcreate_region_references - * \n Creates an array of region references using an array of paths to + * \n Creates an array of region references using an array of paths to * datasets and an array of corresponding hyperslab descriptions. * - \ref H5LRget_region_info * \n Retrieves information about the data a region reference points to. 
* - \ref H5LRmake_dataset - * \n Creates and writes a dataset containing a list of - * region references. + * \n Creates and writes a dataset containing a list of region references. * - \ref H5LRread_region - * \n Retrieves raw data pointed to by a region reference to - * an application buffer. + * \n Retrieves raw data pointed to by a region reference to an application buffer. * - \ref H5LTcopy_region - * \n Copies data from a specified region in a source dataset - * to a specified region in a destination dataset. + * \n Copies data from a specified region in a source dataset to a specified region in a destination dataset. * - \ref H5LTread_bitfield_value - * \n Retrieves the values of quality flags for each element - * to the application provided buffer. + * \n Retrieves the values of quality flags for each element to the application provided buffer. * - \ref H5LTread_region * \n Reads selected data to an application buffer. * @@ -77,24 +65,24 @@ * \param[in] path Path to the dataset being created * \param[in] type_id Datatype of the dataset * \param[in] buf_size Size of the \p loc_id_ref and \p buf arrays - * \param[in] loc_id_ref Array of object identifiers; each identifier - * describes to which HDF5 file the corresponding + * \param[in] loc_id_ref Array of object identifiers; each identifier + * describes to which HDF5 file the corresponding * region reference belongs to * \param[in] buf Array of region references * * \return \herr_t * - * \details Given an array of size \p buf_size of region references \p buf, - * the function will create a dataset with path \p path, at location - * specified by \p loc_id and of a datatype specified by \p type_id, - * and will write data associated with each region reference in the order - * corresponding to the order of the region references in the buffer. - * It is assumed that all referenced hyperslabs have the same dimensionality, - * and only the size of the slowest changing dimension may differ. - * Each reference in the \p buf array belongs to the file identified + * \details Given an array of size \p buf_size of region references \p buf, + * the function will create a dataset with path \p path, at location + * specified by \p loc_id and of a datatype specified by \p type_id, + * and will write data associated with each region reference in the order + * corresponding to the order of the region references in the buffer. + * It is assumed that all referenced hyperslabs have the same dimensionality, + * and only the size of the slowest changing dimension may differ. + * Each reference in the \p buf array belongs to the file identified * by the corresponding object identifiers in the array \p loc_id_ref. * - * If \p path does not exist in \p loc_id then the function will + * If \p path does not exist in \p loc_id then the function will * create the path specified by \p path automatically. * * \version 1.1 Fortran wrapper introduced in this release. 
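A minimal usage sketch for H5LRmake_dataset() as described above — not part of this patch. It assumes the separately distributed H5LR extension library is installed (the header name below is a placeholder), and the file and dataset names are made up:

\code{.c}
#include <stdio.h>
#include "hdf5.h"
#include "H5LR.h" /* placeholder name for the header shipped with the H5LR extension library */

/* Gather two previously created region references, one per source file,
 * into a new integer dataset "/combined" in the destination file. */
void make_refs_dataset(hdset_reg_ref_t refs[2], hid_t src1, hid_t src2)
{
    hid_t  dst     = H5Fopen("dest.h5", H5F_ACC_RDWR, H5P_DEFAULT); /* made-up file name */
    hid_t  srcs[2] = {src1, src2}; /* file each region reference belongs to */
    herr_t status  = H5LRmake_dataset(dst, "/combined", H5T_NATIVE_INT, 2, srcs, refs);

    if (status < 0)
        fprintf(stderr, "H5LRmake_dataset failed\n");
    H5Fclose(dst);
}
\endcode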
@@ -103,10 +91,10 @@ * */ H5_HLRDLL herr_t H5LRmake_dataset(hid_t loc_id, - const char *path, - hid_t type_id, const size_t buf_size, - const hid_t *loc_id_ref, - const hdset_reg_ref_t *buf); + const char *path, + hid_t type_id, const size_t buf_size, + const hid_t *loc_id_ref, + const hdset_reg_ref_t *buf); /*------------------------------------------------------------------------- * @@ -119,49 +107,46 @@ H5_HLRDLL herr_t H5LRmake_dataset(hid_t loc_id, * -------------------------------------------------------------------------- * \ingroup H5LR * - * \brief Creates an array of region references using an array of paths to + * \brief Creates an array of region references using an array of paths to * datasets and an array of corresponding hyperslab descriptions. * * \param[in] obj_id File identifier for the HDF5 file containing * the referenced regions or an object identifier * for any object in that file - * \param[in] num_elem Number of elements in the \p path and - * \p buf arrays - * \param[in] path Array of pointers to strings, which contain - * the paths to the target datasets for the - * region references + * \param[in] num_elem Number of elements in the \p path and \p buf arrays + * \param[in] path Array of pointers to strings, which contain + * the paths to the target datasets for the region references * \param[in] block_coord Array of hyperslab coordinate - * \param[out] buf Buffer for returning an array of region - * references + * \param[out] buf Buffer for returning an array of region references * * \return \herr_t * * \note **Motivation:** - * \note H5LRcreate_region_references() is useful when creating + * \note H5LRcreate_region_references() is useful when creating * large numbers of similar region references. * - * \details H5LRcreate_region_references() creates a list of region references - * given an array of paths to datasets and another array listing the + * \details H5LRcreate_region_references() creates a list of region references + * given an array of paths to datasets and another array listing the * corner coordinates of the corresponding hyperslabs. * * \p path parameter is an array of pointers to strings. * - * \p num_elem specifies the number of region references to be created, + * \p num_elem specifies the number of region references to be created, * thus specifying the size of the \p path and \p _buf arrays. * - * Buffer \p block_coord has size 2*rank and is the coordinates of the - * starting point following by the coordinates of the ending point of - * the hyperslab, repeated \p num_elem times for each hyperslab. - * For example, creating two region references to two hyperslabs, - * one with a rectangular hyperslab region starting at element (2,2) - * to element (5,4) and the second rectangular region starting at - * element (7,7) to element (9,10), results in \p block_coord + * Buffer \p block_coord has size 2*rank and is the coordinates of the + * starting point following by the coordinates of the ending point of + * the hyperslab, repeated \p num_elem times for each hyperslab. + * For example, creating two region references to two hyperslabs, + * one with a rectangular hyperslab region starting at element (2,2) + * to element (5,4) and the second rectangular region starting at + * element (7,7) to element (9,10), results in \p block_coord * being {2,2,5,4, 7,7,9,10}. * - * The rank of the hyperslab will be the same as the rank of the - * target dataset. 
H5LRcreate_region_references() will retrieve - * the rank for each dataset and will use those values to interpret - * the values in the buffer. Please note that rank may vary from one + * The rank of the hyperslab will be the same as the rank of the + * target dataset. H5LRcreate_region_references() will retrieve + * the rank for each dataset and will use those values to interpret + * the values in the buffer. Please note that rank may vary from one * dataset to another. * * \version 1.1 Fortran wrapper introduced in this release. @@ -170,43 +155,39 @@ H5_HLRDLL herr_t H5LRmake_dataset(hid_t loc_id, * */ H5_HLRDLL herr_t H5LRcreate_region_references(hid_t obj_id, - size_t num_elem, - const char **path, - const hsize_t *block_coord, - hdset_reg_ref_t *buf); + size_t num_elem, + const char **path, + const hsize_t *block_coord, + hdset_reg_ref_t *buf); /** * -------------------------------------------------------------------------- * \ingroup H5LR * - * \brief Copies data from the specified dataset to a new location and - * creates a reference to it. + * \brief Copies data from the specified dataset to a new location and creates a reference to it. * - * \param[in] obj_id Identifier of any object in a file an - * HDF5 reference belongs to + * \param[in] obj_id Identifier of any object in a file an HDF5 reference belongs to * \param[in] ref Reference to the datasets region - * \param[in] file Name of the destination file + * \param[in] file Name of the destination file * \param[in] path Full path to the destination dataset - * \param[in] block_coord Hyperslab coordinates in the destination - * dataset - * \param[out] ref_new Region reference to the new location of - * data + * \param[in] block_coord Hyperslab coordinates in the destination dataset + * \param[out] ref_new Region reference to the new location of data * * \return \herr_t * - * \details Given a data set pointed to by a region reference, the function - * H5LRcopy_reference() will copy the hyperslab data referenced by - * a datasets region reference into existing dataset specified by - * its path \p path in the file with the name \p file, and to location - * specified by the hyperslab coordinates \p block_coord. It will - * create the region reference \p ref_new to point to the new location. - * The number of elements in the old and newly specified regions has + * \details Given a data set pointed to by a region reference, the function + * H5LRcopy_reference() will copy the hyperslab data referenced by + * a datasets region reference into existing dataset specified by + * its path \p path in the file with the name \p file, and to location + * specified by the hyperslab coordinates \p block_coord. It will + * create the region reference \p ref_new to point to the new location. + * The number of elements in the old and newly specified regions has * to be the same. * - * Buffer \p block_coord has size 2*rank and is the coordinates of - * the starting point following by the coordinates of the ending - * point of the hyperslab. For example, to extract a rectangular - * hyperslab region starting at element (2,2) to element (5,4) + * Buffer \p block_coord has size 2*rank and is the coordinates of + * the starting point following by the coordinates of the ending + * point of the hyperslab. For example, to extract a rectangular + * hyperslab region starting at element (2,2) to element (5,4) * then \p block_coord would be {2, 2, 5, 4}. * * \version 1.1 Fortran wrapper introduced in this release. 
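To make the two-hyperslab example in the H5LRcreate_region_references() description above concrete, here is a hedged sketch — not part of this patch; the file and dataset names are made up:

\code{.c}
/* Two rank-2 hyperslabs: (2,2)-(5,4) in /dsetA and (7,7)-(9,10) in /dsetB. */
hid_t           src            = H5Fopen("source.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
const char     *paths[2]       = {"/dsetA", "/dsetB"};
hsize_t         block_coord[8] = {2, 2, 5, 4, 7, 7, 9, 10};
hdset_reg_ref_t refs[2];

herr_t status = H5LRcreate_region_references(src, 2, paths, block_coord, refs);
\endcode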
@@ -215,41 +196,39 @@ H5_HLRDLL herr_t H5LRcreate_region_references(hid_t obj_id, * */ H5_HLRDLL herr_t H5LRcopy_reference(hid_t obj_id, hdset_reg_ref_t *ref, const char *file, - const char *path, const hsize_t *block_coord, - hdset_reg_ref_t *ref_new); + const char *path, const hsize_t *block_coord, + hdset_reg_ref_t *ref_new); /** * -------------------------------------------------------------------------- * \ingroup H5LR * - * \brief Copies data from a referenced region to a region in a - * destination dataset. + * \brief Copies data from a referenced region to a region in a destination dataset. * - * \param[in] obj_id Identifier of any object in a file + * \param[in] obj_id Identifier of any object in a file * dataset region reference belongs to * \param[in] ref Dataset region reference - * \param[in] file Name of the destination file + * \param[in] file Name of the destination file * \param[in] path Full path to the destination dataset - * \param[in] block_coord Hyperslab coordinates in the destination - * dataset + * \param[in] block_coord Hyperslab coordinates in the destination dataset * * \return \herr_t * - * \details Given a dataset region reference \p ref in a source file - * specified by an identifier of any object in that file - * \p obj_id, the function will write data to the existing - * dataset \p path in file \p file to the simple hyperslab + * \details Given a dataset region reference \p ref in a source file + * specified by an identifier of any object in that file + * \p obj_id, the function will write data to the existing + * dataset \p path in file \p file to the simple hyperslab * specified by \p block_coord. * - * Buffer \p block_coord has size 2*rank and is the coordinates - * of the starting point following by the coordinates of the - * ending point of the hyperslab. For example, to specify a - * rectangular hyperslab destination region starting at element + * Buffer \p block_coord has size 2*rank and is the coordinates + * of the starting point following by the coordinates of the + * ending point of the hyperslab. For example, to specify a + * rectangular hyperslab destination region starting at element * (2,2) to element (5,4) then \p block_coord would be {2, 2, 5, 4}. * - * If \p path does not exist in the destination file (as may be - * the case when writing to a new file) then the dataset will be - * copied directly to the \p path and \p block_coord will be + * If \p path does not exist in the destination file (as may be + * the case when writing to a new file) then the dataset will be + * copied directly to the \p path and \p block_coord will be * disregarded. * * \version 1.1 Fortran wrapper introduced in this release. 
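A hedged sketch of H5LRcopy_region() following the description above — not part of this patch. The destination names are made up, and src_obj_id (any object in the source file) and ref (an existing region reference) are assumed to have been obtained earlier:

\code{.c}
/* Copy the data behind an existing region reference into the rectangular
 * region (2,2)-(5,4) of dataset /dest in dest.h5. */
hsize_t block_coord[4] = {2, 2, 5, 4};
herr_t  status = H5LRcopy_region(src_obj_id, &ref, "dest.h5", "/dest", block_coord);
\endcode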
@@ -258,71 +237,66 @@ H5_HLRDLL herr_t H5LRcopy_reference(hid_t obj_id, hdset_reg_ref_t *ref, const ch * */ H5_HLRDLL herr_t H5LRcopy_region(hid_t obj_id, - hdset_reg_ref_t *ref, - const char *file, - const char *path, - const hsize_t *block_coord); + hdset_reg_ref_t *ref, + const char *file, + const char *path, + const hsize_t *block_coord); /** * -------------------------------------------------------------------------- * \ingroup H5LR * - * \brief Creates a dataset with the region references to the data - * in all datasets located under a specified group in a file - * or creates a dataset with object references to all objects + * \brief Creates a dataset with the region references to the data + * in all datasets located under a specified group in a file + * or creates a dataset with object references to all objects * (groups or datasets) located under a specified group in a file. * * \fg_loc_id - * \param[in] group_path Absolute or relative path to the group - * at which traversal starts - * \param[in] ds_path Absolute or relative path to the dataset - * with region references to be created - * \param[in] index_type Index_type; - * see valid values below in description - * \param[in] order Order in which index is traversed; - * see valid values below in description - * \param[in] ref_type Reference type; - * see valid values below in description + * \param[in] group_path Absolute or relative path to the group at which traversal starts + * \param[in] ds_path Absolute or relative path to the dataset with region references to be created + * \param[in] index_type Index_type; see valid values below in description + * \param[in] order Order in which index is traversed; see valid values below in description + * \param[in] ref_type Reference type; see valid values below in description * * \return \herr_t * - * \details H5LRcreate_ref_to_all() creates a dataset with the - * region references to the data in all datasets located - * under a specified group in a file or creates a dataset with - * object references to all objects (groups or datasets) located + * \details H5LRcreate_ref_to_all() creates a dataset with the + * region references to the data in all datasets located + * under a specified group in a file or creates a dataset with + * object references to all objects (groups or datasets) located * under a specified group in a file. * - * Given a dataset path \p ds_path in a file specified by the - * \p loc_id identifier, the function H5LRcreate_ref_to_all() - * will create a contiguous one-dimensional dataset with the - * region references or object references depending on the value - * of the \p ref_type parameter. When \p ref_type is - * #H5R_DATASET_REGION, each region reference points to all data - * in a dataset encountered by an internally called H5Lvisit() - * routine, which starts at the group specified by the \p loc_id + * Given a dataset path \p ds_path in a file specified by the + * \p loc_id identifier, the function H5LRcreate_ref_to_all() + * will create a contiguous one-dimensional dataset with the + * region references or object references depending on the value + * of the \p ref_type parameter. When \p ref_type is + * #H5R_DATASET_REGION, each region reference points to all data + * in a dataset encountered by an internally called H5Lvisit() + * routine, which starts at the group specified by the \p loc_id * and \p group_path parameters. 
In a like manner, when - * \p ref_type is #H5R_OBJECT, each object reference points to + * \p ref_type is #H5R_OBJECT, each object reference points to * an object (a group or a dataset) encountered by H5Lvisit(). * - * If \p ds_path does not exist in \p loc_id then the function + * If \p ds_path does not exist in \p loc_id then the function * will create the path specified by \p ds_path automatically. * - * \p index_type specifies the index to be used. + * \p index_type specifies the index to be used. * Valid values include the following: * - #H5_INDEX_NAME Alphanumeric index on name * - #H5_INDEX_CRT_ORDER Index on creation order * - * \p order specifies the order in which objects are to be - * inspected along the index specified in \p index_type. + * \p order specifies the order in which objects are to be + * inspected along the index specified in \p index_type. * Valid values include the following: * - #H5_ITER_INC Increasing order * - #H5_ITER_DEC Decreasing order * - #H5_ITER_NATIVE Fastest available order * - * For more detailed information on these two parameters, - * see H5Lvisit(). + * For more detailed information on these two parameters, + * @see H5Lvisit(). * - * \p ref_type specifies the type of the reference to be used. + * \p ref_type specifies the type of the reference to be used. * Valid values include the following: * - #H5R_DATASET_REGION Dataset region reference * - #H5R_OBJECT Object reference @@ -333,7 +307,7 @@ H5_HLRDLL herr_t H5LRcopy_region(hid_t obj_id, * */ H5_HLRDLL herr_t H5LRcreate_ref_to_all(hid_t loc_id, const char *group_path, - const char *ds_path, H5_index_t index_type, H5_iter_order_t order, H5R_type_t ref_type); + const char *ds_path, H5_index_t index_type, H5_iter_order_t order, H5R_type_t ref_type); /*------------------------------------------------------------------------- * @@ -352,30 +326,27 @@ H5_HLRDLL herr_t H5LRcreate_ref_to_all(hid_t loc_id, const char *group_path, * \param[in] obj_id File identifier for the HDF5 file containing * the dataset with the referenced region or an * object identifier for any object in that file - * \param[in] ref Region reference specifying data to be read - * in - * \param[in] mem_type Memory datatype of data read from referenced + * \param[in] ref Region reference specifying data to be read in + * \param[in] mem_type Memory datatype of data read from referenced * region into the application buffer - * \param[in,out] numelem Number of elements to be read into buffer - * \p buf - * \param[out] buf Buffer in which data is returned to the - * application + * \param[in,out] numelem Number of elements to be read into buffer \p buf + * \param[out] buf Buffer in which data is returned to the application * * \return \herr_t * - * \details H5LRread_region() reads data pointed to by the region + * \details H5LRread_region() reads data pointed to by the region * reference \p ref into the buffer \p buf. * - * \p numelem specifies the number of elements to be read - * into \p buf. When the size of the reference region is unknown, - * H5LRread_region() can be called with \p buf set to NULL; - * the number of elements in the referenced region will be returned + * \p numelem specifies the number of elements to be read + * into \p buf. When the size of the reference region is unknown, + * H5LRread_region() can be called with \p buf set to NULL; + * the number of elements in the referenced region will be returned * in \p numelem. * - * The buffer buf must be big enough to hold \p numelem elements - * of type \p mem_type. 
For example, if data is read from the referenced - * region into an integer buffer, \p mem_type should be #H5T_NATIVE_INT - * and the buffer must be at least \c sizeof(int) * \p numelem bytes + * The buffer buf must be big enough to hold \p numelem elements + * of type \p mem_type. For example, if data is read from the referenced + * region into an integer buffer, \p mem_type should be #H5T_NATIVE_INT + * and the buffer must be at least \c sizeof(int) * \p numelem bytes * in size. This buffer must be allocated by the application. * * \version 1.1 Fortran wrapper introduced in this release. @@ -384,10 +355,10 @@ H5_HLRDLL herr_t H5LRcreate_ref_to_all(hid_t loc_id, const char *group_path, * */ H5_HLRDLL herr_t H5LRread_region(hid_t obj_id, - const hdset_reg_ref_t *ref, - hid_t mem_type, - size_t *numelem, - void *buf ); + const hdset_reg_ref_t *ref, + hid_t mem_type, + size_t *numelem, + void *buf ); /*------------------------------------------------------------------------- * @@ -400,40 +371,33 @@ H5_HLRDLL herr_t H5LRread_region(hid_t obj_id, * -------------------------------------------------------------------------- * \ingroup H5LR * - * \brief Retrieves information about the data a region reference - * points to. + * \brief Retrieves information about the data a region reference points to. * - * \param[in] obj_id Identifier of any object in an HDF5 file - * the region reference belongs to. + * \param[in] obj_id Identifier of any object in an HDF5 file the region reference belongs to. * \param[in] ref Region reference to query - * \param[in,out] len Size of the buffer to store \p path in. - * NOTE: if \p *path is not NULL then \p *len - * must be the appropriate length + * \param[in,out] len Size of the buffer to store \p path in. + * NOTE: if \p *path is not NULL then \p *len must be the appropriate length * \param[out] path Full path that a region reference points to * \param[out] rank The number of dimensions of the dataset - * dimensions of the dataset pointed by - * region reference. - * \param[out] dtype Datatype of the dataset pointed by the - * region reference. + * dimensions of the dataset pointed by region reference. + * \param[out] dtype Datatype of the dataset pointed by the region reference. * \param[out] sel_type Type of the selection (point or hyperslab) - * \param[in,out] numelem Number of coordinate blocks or - * selected elements. - * \param[out] buf Buffer containing description of the region - * pointed by region reference + * \param[in,out] numelem Number of coordinate blocks or selected elements. + * \param[out] buf Buffer containing description of the region pointed by region reference * * \return \herr_t * - * \details H5LRget_region_info() queries information about the data - * pointed by a region reference \p ref. It returns one of the - * absolute paths to a dataset, length of the path, dataset’s rank - * and datatype, description of the referenced region and type of - * the referenced region. Any output argument can be NULL if that + * \details H5LRget_region_info() queries information about the data + * pointed by a region reference \p ref. It returns one of the + * absolute paths to a dataset, length of the path, dataset’s rank + * and datatype, description of the referenced region and type of + * the referenced region. Any output argument can be NULL if that * argument does not need to be returned. * - * The parameter \p obj_id is an identifier for any object in the - * HDF5 file containing the referenced object. 
For example, it can - * be an identifier of a dataset the region reference belongs to - * or an identifier of an HDF5 file the dataset with region references + * The parameter \p obj_id is an identifier for any object in the + * HDF5 file containing the referenced object. For example, it can + * be an identifier of a dataset the region reference belongs to + * or an identifier of an HDF5 file the dataset with region references * is stored in. * * The parameter \p ref is a region reference to query. @@ -442,36 +406,36 @@ H5_HLRDLL herr_t H5LRread_region(hid_t obj_id, * buffer of size \p len+1 to return an absolute path to a dataset * the region reference points to. * - * The parameter \p len is a length of absolute path string plus - * the \0 string terminator. If path parameter is NULL, actual - * length of the path (+1 for \0 string terminator) is returned to - * application and can be used to allocate buffer path of an + * The parameter \p len is a length of absolute path string plus + * the \0 string terminator. If path parameter is NULL, actual + * length of the path (+1 for \0 string terminator) is returned to + * application and can be used to allocate buffer path of an * appropriate length \p len. * * The parameter \p sel_type describes the type of the selected - * region. Possible values can be #H5S_SEL_POINTS for point + * region. Possible values can be #H5S_SEL_POINTS for point * selection and #H5S_SEL_HYPERSLABS for hyperslab selection. * - * The parameter \p numelem describes how many elements will be - * placed in the buffer \p buf. The number should be interpreted + * The parameter \p numelem describes how many elements will be + * placed in the buffer \p buf. The number should be interpreted * using the value of \p sel_type. * - * If value of \p sel_type is #H5S_SEL_HYPERSLABS, the parameter - * \p buf contains \p numelem blocks of the coordinates for each - * simple hyperslab of the referenced region. Each block has - * length \c 2*\p rank and is organized as follows: <"start" coordinate>, - * immediately followed by <"opposite" corner coordinate>. - * The total size of the buffer to hold the description of the - * region will be \c 2*\p rank*\p numelem. If region reference - * points to a contiguous sub-array, then the value of \p numelem - * is 1 and the block contains coordinates of the upper left and + * If value of \p sel_type is #H5S_SEL_HYPERSLABS, the parameter + * \p buf contains \p numelem blocks of the coordinates for each + * simple hyperslab of the referenced region. Each block has + * length \c 2*\p rank and is organized as follows: <"start" coordinate>, + * immediately followed by <"opposite" corner coordinate>. + * The total size of the buffer to hold the description of the + * region will be \c 2*\p rank*\p numelem. If region reference + * points to a contiguous sub-array, then the value of \p numelem + * is 1 and the block contains coordinates of the upper left and * lower right corners of the sub-array (or simple hyperslab). * - * If value of \p sel_type is #H5S_SEL_POINTS, the parameter \p buf - * contains \p numelem blocks of the coordinates for each selected - * point of the referenced region. Each block has length \p rank - * and contains coordinates of the element. The total size of the - * buffer to hold the description of the region will be + * If value of \p sel_type is #H5S_SEL_POINTS, the parameter \p buf + * contains \p numelem blocks of the coordinates for each selected + * point of the referenced region. 
Each block has length \p rank + * and contains coordinates of the element. The total size of the + * buffer to hold the description of the region will be * \p rank* \p numelem. * * @@ -481,14 +445,14 @@ H5_HLRDLL herr_t H5LRread_region(hid_t obj_id, * */ H5_HLRDLL herr_t H5LRget_region_info(hid_t obj_id, - const hdset_reg_ref_t *ref, - size_t *len, - char *path, - int *rank, - hid_t *dtype, - H5S_sel_type *sel_type, - size_t *numelem, - hsize_t *buf ); + const hdset_reg_ref_t *ref, + size_t *len, + char *path, + int *rank, + hid_t *dtype, + H5S_sel_type *sel_type, + size_t *numelem, + hsize_t *buf ); @@ -503,35 +467,33 @@ H5_HLRDLL herr_t H5LRget_region_info(hid_t obj_id, * -------------------------------------------------------------------------- * \ingroup H5LR * - * \brief Copies data from a specified region in a source dataset + * \brief Copies data from a specified region in a source dataset * to a specified region in a destination dataset * * \param[in] file_src Name of the source file * \param[in] path_src Full path to the source dataset - * \param[in] block_coord_src Hyperslab coordinates in the - * source dataset + * \param[in] block_coord_src Hyperslab coordinates in the source dataset * \param[in] file_dest Name of the destination file * \param[in] path_dest Full path to the destination dataset - * \param[in] block_coord_dset Hyperslab coordinates in the - * destination dataset + * \param[in] block_coord_dset Hyperslab coordinates in the destination dataset * * \return \herr_t * - * \details Given a path to a dataset \p path_src in a file with the - * name \p file_src, and description of a simple hyperslab of - * the source \p block_coord_src, the function will write data - * to the dataset \p path_dest in file \p file_dest to the - * simple hyperslab specified by \p block_coord_dset. - * The arrays \p block_coord_src and \p block_coord_dset have - * a length of 2*rank and are the coordinates of the starting - * point following by the coordinates of the ending point of the - * hyperslab. For example, to specify a rectangular hyperslab - * destination region starting at element (2,2) to element (5,4) + * \details Given a path to a dataset \p path_src in a file with the + * name \p file_src, and description of a simple hyperslab of + * the source \p block_coord_src, the function will write data + * to the dataset \p path_dest in file \p file_dest to the + * simple hyperslab specified by \p block_coord_dset. + * The arrays \p block_coord_src and \p block_coord_dset have + * a length of 2*rank and are the coordinates of the starting + * point following by the coordinates of the ending point of the + * hyperslab. For example, to specify a rectangular hyperslab + * destination region starting at element (2,2) to element (5,4) * then \p block_coord_dset would be {2, 2, 5, 4}. * - * If \p path_dest does not exist in the destination file - * (as may be the case when writing to a new file) then the - * dataset will be copied directly to the \p path_dest and + * If \p path_dest does not exist in the destination file + * (as may be the case when writing to a new file) then the + * dataset will be copied directly to the \p path_dest and * \p block_coord_dset will be disregarded. * * \version 1.1 Fortran wrapper introduced in this release. 
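    As an illustration of the coordinate convention just described, the following minimal C sketch copies a 4 x 3 block from a source dataset into a destination dataset with H5LTcopy_region(). The file names, dataset paths, and block shapes are hypothetical, and the include line assumes the extension prototypes in this header are made visible through the usual high-level header; adjust to the actual build.

    #include "hdf5.h"
    #include "hdf5_hl.h"   /* assumption: the extension prototypes above are visible through this header */

    int main(void)
    {
        /* Source block: start corner (2,2) followed by the opposite corner (5,4), i.e. 4 x 3 elements */
        hsize_t src_block[4] = {2, 2, 5, 4};
        /* Destination block of the same shape, starting at element (0,0) */
        hsize_t dst_block[4] = {0, 0, 3, 2};

        /* Hypothetical files and dataset paths */
        if (H5LTcopy_region("source.h5", "/data/raw", src_block,
                            "dest.h5", "/data/window", dst_block) < 0)
            return 1;

        return 0;
    }
    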
@@ -540,11 +502,11 @@ H5_HLRDLL herr_t H5LRget_region_info(hid_t obj_id, * */ H5_HLRDLL herr_t H5LTcopy_region(const char *file_src, - const char *path_src, - const hsize_t *block_coord_src, - const char *file_dest, - const char *path_dest, - const hsize_t *block_coord_dset); + const char *path_src, + const hsize_t *block_coord_src, + const char *file_dest, + const char *path_dest, + const hsize_t *block_coord_dset); /*------------------------------------------------------------------------- * @@ -562,27 +524,25 @@ H5_HLRDLL herr_t H5LTcopy_region(const char *file_src, * \param[in] file Name of file * \param[in] path Full path to a dataset * \param[in] block_coord Hyperslab coordinates - * \param[in] mem_type Memory datatype, describing the buffer - * the referenced data will be read into - * \param[out] buf Buffer containing data from the - * referenced region + * \param[in] mem_type Memory datatype, describing the buffer the referenced data will be read into + * \param[out] buf Buffer containing data from the referenced region * * \return \herr_t * - * \details H5LTread_region() reads data from a region described by - * the hyperslab coordinates in \p block_coord, located in - * the dataset specified by its absolute path \p path in a - * file specified by its name \p file. Data is read into a - * buffer \p buf of the datatype that corresponds to the + * \details H5LTread_region() reads data from a region described by + * the hyperslab coordinates in \p block_coord, located in + * the dataset specified by its absolute path \p path in a + * file specified by its name \p file. Data is read into a + * buffer \p buf of the datatype that corresponds to the * HDF5 datatype specified by \p mem_type. * - * Buffer \p block_coord has size 2*rank and is the coordinates - * of the starting point following by the coordinates of the - * ending point of the hyperslab. For example, to extract a - * rectangular hyperslab region starting at element (2,2) to + * Buffer \p block_coord has size 2*rank and is the coordinates + * of the starting point following by the coordinates of the + * ending point of the hyperslab. For example, to extract a + * rectangular hyperslab region starting at element (2,2) to * element (5,4) then \p block_coord would be {2, 2, 5, 4}. * - * Buffer \p buf should be big enough to hold selected elements + * Buffer \p buf should be big enough to hold selected elements * of the type that corresponds to the \p mem_type * * \version 1.1 Fortran wrapper introduced in this release. @@ -591,57 +551,55 @@ H5_HLRDLL herr_t H5LTcopy_region(const char *file_src, * */ H5_HLRDLL herr_t H5LTread_region(const char *file, - const char *path, - const hsize_t *block_coord, - hid_t mem_type, - void *buf ); + const char *path, + const hsize_t *block_coord, + hid_t mem_type, + void *buf ); /** * -------------------------------------------------------------------------- * \ingroup H5LR * - * \brief Retrieves the values of quality flags for each element + * \brief Retrieves the values of quality flags for each element * to the application provided buffer. 
* * \param[in] dset_id Identifier of the dataset with bit-field values * \param[in] num_values Number of the values to be extracted - * \param[in] offset Array of staring bits to be extracted from + * \param[in] offset Array of staring bits to be extracted from * the element; valid values: 0 (zero) through 7 - * \param[in] lengths Array of the number of bits to be extracted - * for each value - * \param[in] space Dataspace identifier, describing the elements - * to be read from the dataset with bit-field - * values + * \param[in] lengths Array of the number of bits to be extracted for each value + * \param[in] space Dataspace identifier, describing the elements + * to be read from the dataset with bit-field values * \param[out] buf Buffer to read the values in * * \return \herr_t * - * \details H5LTread_bitfield_value() reads selected elements from a - * dataset specified by its identifier \p dset_id, and unpacks + * \details H5LTread_bitfield_value() reads selected elements from a + * dataset specified by its identifier \p dset_id, and unpacks * the bit-field values to a buffer \p buf. * - * The parameter \p space is a space identifier that indicates + * The parameter \p space is a space identifier that indicates * which elements of the dataset should be read. * - * The parameter \p offset is an array of length \p num_values; + * The parameter \p offset is an array of length \p num_values; * the ith element of the array holds the value of the - * starting bit of the ith bit-field value. + * starting bit of the ith bit-field value. * Valid values are: 0 (zero) through 7. * - * The parameter \p lengths is an array of length \p num_values; - * the ith element of the array holds the number of - * bits to be extracted for the ith bit-field value. - * Extracted bits will be interpreted as a base-2 integer value. - * Each value will be converted to the base-10 integer value and - * stored in the application buffer. - * - * Buffer \p buf is allocated by the application and should be big - * enough to hold \c num_sel_elem * \p num_values elements of the - * specified type, where \c num_sel_elem is a number of the elements - * to be read from the dataset. Data in the buffer is organized - * as \p num_values values for the first element, followed by the - * \p num_values values for the second element, ... , followed by - * the \p num_values values for the + * The parameter \p lengths is an array of length \p num_values; + * the ith element of the array holds the number of + * bits to be extracted for the ith bit-field value. + * Extracted bits will be interpreted as a base-2 integer value. + * Each value will be converted to the base-10 integer value and + * stored in the application buffer. + * + * Buffer \p buf is allocated by the application and should be big + * enough to hold \c num_sel_elem * \p num_values elements of the + * specified type, where \c num_sel_elem is a number of the elements + * to be read from the dataset. Data in the buffer is organized + * as \p num_values values for the first element, followed by the + * \p num_values values for the second element, ... , followed by + * the \p num_values values for the * \c num_selected_elemth element. * * \version 1.1 Fortran wrapper introduced in this release. 
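    A minimal sketch of how the buffer sizing described above works in practice, assuming a dataset whose elements pack two 4-bit flags. The helper name, flag layout, and include line are illustrative only; the identifiers dset_id, space, and the selected-element count come from the caller.

    #include <stdlib.h>
    #include "hdf5.h"
    #include "hdf5_hl.h"   /* assumption: the prototype below is visible through this header */

    /* Hypothetical helper: unpack two 4-bit flags from each selected element. */
    static herr_t read_two_flags(hid_t dset_id, hid_t space, size_t nelem)
    {
        unsigned offset[2]  = {0, 4};   /* starting bit of each packed value (valid: 0 through 7) */
        unsigned lengths[2] = {4, 4};   /* number of bits in each packed value */
        herr_t   status;

        /* num_sel_elem * num_values integers, as described above */
        int *buf = (int *)malloc(nelem * 2 * sizeof(int));
        if (buf == NULL)
            return -1;

        status = H5LTread_bitfield_value(dset_id, 2, offset, lengths, space, buf);
        /* buf[0], buf[1] hold the two flags of the first element, buf[2], buf[3] the second, ... */

        free(buf);
        return status;
    }
    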
@@ -650,5 +608,5 @@ H5_HLRDLL herr_t H5LTread_region(const char *file, * */ H5_HLRDLL herr_t H5LTread_bitfield_value(hid_t dset_id, int num_values, const unsigned *offset, - const unsigned *lengths, hid_t space, int *buf); + const unsigned *lengths, hid_t space, int *buf); diff --git a/doxygen/dox/high_level/high_level.dox b/doxygen/dox/high_level/high_level.dox deleted file mode 100644 index c53d298..0000000 --- a/doxygen/dox/high_level/high_level.dox +++ /dev/null @@ -1,29 +0,0 @@ -/** \page high_level High-level library - * The high-level HDF5 library includes several sets of convenience and standard-use APIs to - * facilitate common HDF5 operations. - * - *
    - *
  • \ref H5LT "Lite (H5LT, H5LD)" - * \n - * Functions to simplify creating and manipulating datasets, attributes and other features - *
  • \ref H5IM "Image (H5IM)" - * \n - * Creating and manipulating HDF5 datasets intended to be interpreted as images - *
  • \ref H5TB "Table (H5TB)" - * \n - * Creating and manipulating HDF5 datasets intended to be interpreted as tables - *
  • \ref H5PT "Packet Table (H5PT)" - * \n - * Creating and manipulating HDF5 datasets to support append- and read-only operations on table data - *
  • \ref H5DS "Dimension Scale (H5DS)" - * \n - * Creating and manipulating HDF5 datasets that are associated with the dimension of another HDF5 dataset - *
  • \ref H5DO "Optimizations (H5DO)" - * \n - * Bypassing default HDF5 behavior in order to optimize for specific use cases - *
  • \ref H5LR "Extensions (H5LR, H5LT)" - * \n - * Working with region references, hyperslab selections, and bit-fields - *
- * - */ diff --git a/doxygen/examples/H5.format.1.0.html b/doxygen/examples/H5.format.1.0.html index ff21315..4eb0548 100644 --- a/doxygen/examples/H5.format.1.0.html +++ b/doxygen/examples/H5.format.1.0.html @@ -139,7 +139,7 @@

This document describes the lower-level data objects; the higher-level objects and their properties are described - in the HDF5 User's Guide. + in the HDF5 User Guide. - { - __label__ fail_file, fail_fspace, fail_dset, fail_sel, fail_aspace, fail_attr, fail_awrite; - hid_t file, fspace, dset, aspace, attr; - H5R_ref_t ref; - - if ((file = H5Fcreate("reference.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) == H5I_INVALID_HID) { - ret_val = EXIT_FAILURE; - goto fail_file; - } - // create a region reference which selects all elements of the dataset at "/data" - if ((fspace = H5Screate_simple(2, (hsize_t[]){10, 20}, NULL)) == H5I_INVALID_HID) { - ret_val = EXIT_FAILURE; - goto fail_fspace; - } - if ((dset = H5Dcreate(file, "data", H5T_STD_I32LE, fspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) == - H5I_INVALID_HID) { - ret_val = EXIT_FAILURE; - goto fail_dset; - } - if (H5Sselect_all(fspace) < 0 || H5Rcreate_region(file, "data", fspace, H5P_DEFAULT, &ref) < 0) { - ret_val = EXIT_FAILURE; - goto fail_sel; - } - // store the region reference in a scalar attribute of the root group called "region" - if ((aspace = H5Screate(H5S_SCALAR)) == H5I_INVALID_HID) { - ret_val = EXIT_FAILURE; - goto fail_aspace; - } - if ((attr = H5Acreate(file, "region", H5T_STD_REF, aspace, H5P_DEFAULT, H5P_DEFAULT)) == - H5I_INVALID_HID) { - ret_val = EXIT_FAILURE; - goto fail_attr; - } - if (H5Awrite(attr, H5T_STD_REF, &ref) < 0) { - ret_val = EXIT_FAILURE; - goto fail_awrite; - } - -fail_awrite: - H5Aclose(attr); -fail_attr: - H5Sclose(aspace); -fail_aspace: - H5Rdestroy(&ref); -fail_sel: - H5Dclose(dset); -fail_dset: - H5Sclose(fspace); -fail_fspace: - H5Fclose(file); -fail_file:; - } - //! - - //! - { - __label__ fail_file, fail_attr, fail_aread; - hid_t file, attr; - H5R_ref_t ref; - - if ((file = H5Fopen("reference.h5", H5F_ACC_RDONLY, H5P_DEFAULT)) == H5I_INVALID_HID) { - ret_val = EXIT_FAILURE; - goto fail_file; - } - - // read the dataset region reference from the attribute - if ((attr = H5Aopen(file, "region", H5P_DEFAULT)) == H5I_INVALID_HID) { - ret_val = EXIT_FAILURE; - goto fail_attr; - } - if (H5Aread(attr, H5T_STD_REF, &ref) < 0) { - ret_val = EXIT_FAILURE; - goto fail_aread; - } - assert(H5Rget_type(&ref) == H5R_DATASET_REGION2); - - // get an HDF5 path name for the dataset of the region reference - { - char buf[255]; - if (H5Rget_obj_name(&ref, H5P_DEFAULT, buf, 255) < 0) { - ret_val = EXIT_FAILURE; - } - printf("Object name: \"%s\"\n", buf); - } - - H5Rdestroy(&ref); -fail_aread: - H5Aclose(attr); -fail_attr: - H5Fclose(file); -fail_file:; - } - //! - - //! - { - __label__ fail_file, fail_attr, fail_ref; - hid_t file, attr; - H5R_ref_t ref; - - if ((file = H5Fopen("reference.h5", H5F_ACC_RDWR, H5P_DEFAULT)) == H5I_INVALID_HID) { - ret_val = EXIT_FAILURE; - goto fail_file; - } - - // H5T_STD_REF is a generic reference type - // we can "update" the attribute value to refer to the attribute itself - if ((attr = H5Aopen(file, "region", H5P_DEFAULT)) == H5I_INVALID_HID) { - ret_val = EXIT_FAILURE; - goto fail_attr; - } - if (H5Rcreate_attr(file, "data", "region", H5P_DEFAULT, &ref) < 0) { - ret_val = EXIT_FAILURE; - goto fail_ref; - } - - assert(H5Rget_type(&ref) == H5R_ATTR); - - if (H5Awrite(attr, H5T_STD_REF, &ref) < 0) { - ret_val = EXIT_FAILURE; - } - - H5Rdestroy(&ref); -fail_ref: - H5Aclose(attr); -fail_attr: - H5Fclose(file); -fail_file:; - } - //! - - //! 
- { - __label__ fail_file, fail_ref; - hid_t file; - H5R_ref_t ref; - - // create an HDF5 object reference to the root group - if ((file = H5Fopen("reference.h5", H5F_ACC_RDONLY, H5P_DEFAULT)) == H5I_INVALID_HID) { - ret_val = EXIT_FAILURE; - goto fail_file; - } - if (H5Rcreate_object(file, ".", H5P_DEFAULT, &ref) < 0) { - ret_val = EXIT_FAILURE; - goto fail_ref; - } - - // H5Rdestroy() releases all resources associated with an HDF5 reference - H5Rdestroy(&ref); -fail_ref: - H5Fclose(file); -fail_file:; - } - //! - - return ret_val; -} diff --git a/doxygen/examples/ThreadSafeLibrary.html b/doxygen/examples/ThreadSafeLibrary.html index 97f7742..5824dc6 100644 --- a/doxygen/examples/ThreadSafeLibrary.html +++ b/doxygen/examples/ThreadSafeLibrary.html @@ -20,9 +20,9 @@ The following code is placed at the beginning of H5private.h:

-H5_HAVE_THREADSAFE is defined when the HDF-5 library is +H5_HAVE_THREADSAFE is defined when the HDF5 library is compiled with the --enable-threadsafe configuration option. In general, -code for the non-threadsafe version of HDF-5 library are placed within +code for the non-threadsafe version of HDF5 library are placed within the #else part of the conditional compilation. The exception to this rule are the changes to the FUNC_ENTER (in H5private.h), HRETURN and HRETURN_ERROR (in @@ -438,7 +438,7 @@ described in Appendix D and may be found in H5TS.c.

Except where stated, all tests involve 16 simultaneous threads that make -use of HDF-5 API calls without any explicit synchronization typically +use of HDF5 API calls without any explicit synchronization typically required in a non-threadsafe environment.

@@ -453,7 +453,7 @@ dataset's named value.

The main thread would join with all 16 threads and attempt to match the -resulting HDF-5 file with expected results - that each dataset contains +resulting HDF5 file with expected results - that each dataset contains the correct value (0 for zero, 1 for one etc ...) and all datasets were correctly created.

@@ -473,7 +473,7 @@ name.

The error stack implementation runs correctly if it reports 15 instances -of the dataset name conflict error and finally generates a correct HDF-5 +of the dataset name conflict error and finally generates a correct HDF5 containing that single dataset. Each thread should report its own stack of errors with a thread number associated with it.

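    To make the test pattern described above concrete, here is a minimal sketch: sixteen threads, each creating and writing one scalar dataset in a shared file, with no synchronization in user code. It assumes a thread-safe HDF5 build; the file and dataset names are invented and error checking is omitted for brevity.

    #include <pthread.h>
    #include <stdio.h>
    #include "hdf5.h"

    #define NTHREADS 16

    static hid_t file_g;          /* file identifier shared by all threads */
    static int   ids[NTHREADS];   /* per-thread dataset numbers */

    static void *worker(void *arg)
    {
        int   n = *(int *)arg;
        char  name[32];
        hid_t space, dset;

        /* Each thread creates its own dataset and stores its number in it */
        snprintf(name, sizeof(name), "dset_%d", n);
        space = H5Screate(H5S_SCALAR);
        dset  = H5Dcreate2(file_g, name, H5T_NATIVE_INT, space,
                           H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &n);
        H5Dclose(dset);
        H5Sclose(space);
        return NULL;
    }

    int main(void)
    {
        pthread_t threads[NTHREADS];
        int       i;

        file_g = H5Fcreate("mtest.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        for (i = 0; i < NTHREADS; i++) {
            ids[i] = i;
            pthread_create(&threads[i], NULL, worker, &ids[i]);
        }
        for (i = 0; i < NTHREADS; i++)
            pthread_join(threads[i], NULL);
        H5Fclose(file_g);
        return 0;
    }
    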
diff --git a/doxygen/examples/core_menu.md b/doxygen/examples/core_menu.md new file mode 100644 index 0000000..8c82cc5 --- /dev/null +++ b/doxygen/examples/core_menu.md @@ -0,0 +1,65 @@ +Core Library + +- @ref H5A "Attributes (H5A)" +
    +An HDF5 attribute is a small metadata object describing the nature and/or intended usage of a primary data object. + +- @ref H5D "Datasets (H5D)" +
    
+Manage HDF5 datasets, including the transfer of data between memory and disk and the description of dataset properties. + +- @ref H5S "Dataspaces (H5S)" +
+HDF5 dataspaces describe the shape of datasets in memory or in HDF5 files. + +- @ref H5T "Datatypes (H5T)" +
+HDF5 datatypes describe the element type of HDF5 datasets and attributes. + +- @ref H5E "Error Handling (H5E)" +
+HDF5 library error reporting. + +- @ref H5F "Files (H5F)" +
+Manage HDF5 files. + +- @ref H5Z "Filters (H5Z)" +
    +Manage HDF5 user-defined filters. + +- @ref H5G "Groups (H5G)" +
    
+Manage HDF5 groups. + +- @ref H5I "Identifiers (H5I)" +
+Manage identifiers defined by the HDF5 library. + +- @ref H5 "Library General (H5)" +
+Manage the life cycle of HDF5 library instances. + +- @ref H5L "Links (H5L)" +
+Manage HDF5 links and link types. + +- @ref H5O "Objects (H5O)" +
+Manage HDF5 objects (groups, datasets, datatype objects). + +- @ref H5P "Property Lists (H5P)" +
+HDF5 property lists are the main vehicle to configure the behavior of HDF5 API functions. + +- @ref H5PL "Dynamically-loaded Plugins (H5PL)" +
+Manage the loading behavior of HDF5 plugins. + +- @ref H5R "References (H5R)" +
+Manage HDF5 references (HDF5 objects, attributes, and selections on datasets a.k.a. dataset regions). + +- @ref H5VL "VOL Connector (H5VL)" +
+Manage HDF5 VOL connector plugins. diff --git a/doxygen/examples/fortran_menu.md b/doxygen/examples/fortran_menu.md new file mode 100644 index 0000000..335a21a --- /dev/null +++ b/doxygen/examples/fortran_menu.md @@ -0,0 +1,61 @@ +Fortran Library + +- H5A "Attributes (H5A)" +
    +An HDF5 attribute is a small metadata object describing the nature and/or intended usage of a primary data object. + +- H5D "Datasets (H5D)" +
    
+Manage HDF5 datasets, including the transfer of data between memory and disk and the description of dataset properties. + +- H5S "Dataspaces (H5S)" +
+HDF5 dataspaces describe the shape of datasets in memory or in HDF5 files. + +- H5T "Datatypes (H5T)" +
+HDF5 datatypes describe the element type of HDF5 datasets and attributes. + +- H5E "Error Handling (H5E)" +
+HDF5 library error reporting. + +- H5F "Files (H5F)" +
+Manage HDF5 files. + +- H5Z "Filters (H5Z)" +
    +Manage HDF5 user-defined filters. + +- H5G "Groups (H5G)" +
    
+Manage HDF5 groups. + +- H5I "Identifiers (H5I)" +
+Manage identifiers defined by the HDF5 library. + +- H5 "Library General (H5)" +
+Manage the life cycle of HDF5 library instances. + +- H5L "Links (H5L)" +
+Manage HDF5 links and link types. + +- H5O "Objects (H5O)" +
+Manage HDF5 objects (groups, datasets, datatype objects). + +- H5P "Property Lists (H5P)" +
+HDF5 property lists are the main vehicle to configure the behavior of HDF5 API functions. + +- H5PL "Dynamically-loaded Plugins (H5PL)" +
+Manage the loading behavior of HDF5 plugins. + +- H5R "References (H5R)" +
+Manage HDF5 references (HDF5 objects, attributes, and selections on datasets a.k.a. dataset regions). diff --git a/doxygen/examples/high_level_menu.md b/doxygen/examples/high_level_menu.md new file mode 100644 index 0000000..9e6f696 --- /dev/null +++ b/doxygen/examples/high_level_menu.md @@ -0,0 +1,30 @@ +High-level library +
+The high-level HDF5 library includes several sets of convenience and standard-use APIs to +facilitate common HDF5 operations. + +- @ref H5LT "Lite (H5LT, H5LD)" +
+Functions to simplify creating and manipulating datasets, attributes and other features + +- @ref H5IM "Image (H5IM)" +
+Creating and manipulating HDF5 datasets intended to be interpreted as images + +- @ref H5TB "Table (H5TB)" +
+Creating and manipulating HDF5 datasets intended to be interpreted as tables + +- @ref H5PT "Packet Table (H5PT)" +
+Creating and manipulating HDF5 datasets to support append- and read-only operations on table data + +- @ref H5DS "Dimension Scale (H5DS)" +
+Creating and manipulating HDF5 datasets that are associated with the dimension of another HDF5 dataset + +- @ref H5DO "Optimizations (H5DO)" +
+Bypassing default HDF5 behavior in order to optimize for specific use cases + +- @ref H5LR "Extensions (H5LR, H5LT)" diff --git a/doxygen/examples/java_menu.md b/doxygen/examples/java_menu.md new file mode 100644 index 0000000..1236838 --- /dev/null +++ b/doxygen/examples/java_menu.md @@ -0,0 +1,84 @@ +Java Library + @ref HDF5LIB + +- @ref JH5 +
+This package is the Java interface for the HDF5 library. + +- @ref JH5A +
+This package is the Java interface for the HDF5 library attribute APIs. + +- @ref JH5D +
+This package is the Java interface for the HDF5 library dataset APIs. + +- @ref JH5S +
+This package is the Java interface for the HDF5 library dataspace APIs. + +- @ref JH5T +
+This package is the Java interface for the HDF5 library datatype APIs. + +- @ref JH5E +
+This package is the Java interface for the HDF5 library error APIs. + +- @ref JH5F +
+This package is the Java interface for the HDF5 library file APIs. + +- @ref JH5Z +
+This package is the Java interface for the HDF5 library filter APIs. + +- @ref JH5G +
+This package is the Java interface for the HDF5 library group APIs. + +- @ref JH5I +
+This package is the Java interface for the HDF5 library identifier APIs. + +- @ref JH5L +
+This package is the Java interface for the HDF5 library links APIs. + +- @ref JH5O +
+This package is the Java interface for the HDF5 library object APIs. + +- @ref JH5P +
+This package is the Java interface for the HDF5 library property list APIs. + +- @ref JH5PL +
+This package is the Java interface for the HDF5 library plugin APIs. + +- @ref JH5R +
+This package is the Java interface for the HDF5 library reference APIs. + +- @ref JH5VL +
+This package is the Java interface for the HDF5 library VOL connector APIs. + +- @ref HDF5CONST +
    +This class contains C constants and enumerated types of the HDF5 library. + +- @ref HDFNATIVE +
    
    +This class encapsulates native methods to deal with arrays of numbers, + converting from numbers to bytes and bytes to numbers. + +- @ref HDFARRAY +
    
+This is a class for handling multidimensional arrays for HDF. + +- @ref ERRORS +
+The class HDF5Exception returns errors from the Java HDF5 Interface. + \ No newline at end of file diff --git a/doxygen/hdf5doxy.css b/doxygen/hdf5doxy.css index 8c03860..639843d 100644 --- a/doxygen/hdf5doxy.css +++ b/doxygen/hdf5doxy.css @@ -244,8 +244,16 @@ a { padding: 2px 1px 0; } -a[href*="http"] { - background: url('https://mdn.mozillademos.org/files/12982/external-link-52.png') no-repeat 100% 0; - background-size: 12px 12px; - padding-right: 16px; +a[href^="http"]::after, +a[href^="https://"]::after +{ + content: ""; + width: 11px; + height: 11px; + margin-left: 4px; + background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='16' height='16' fill='currentColor' viewBox='0 0 16 16'%3E%3Cpath fill-rule='evenodd' d='M8.636 3.5a.5.5 0 0 0-.5-.5H1.5A1.5 1.5 0 0 0 0 4.5v10A1.5 1.5 0 0 0 1.5 16h10a1.5 1.5 0 0 0 1.5-1.5V7.864a.5.5 0 0 0-1 0V14.5a.5.5 0 0 1-.5.5h-10a.5.5 0 0 1-.5-.5v-10a.5.5 0 0 1 .5-.5h6.636a.5.5 0 0 0 .5-.5z'/%3E%3Cpath fill-rule='evenodd' d='M16 .5a.5.5 0 0 0-.5-.5h-5a.5.5 0 0 0 0 1h3.793L6.146 9.146a.5.5 0 1 0 .708.708L15 1.707V5.5a.5.5 0 0 0 1 0v-5z'/%3E%3C/svg%3E"); + background-position: center; + background-repeat: no-repeat; + background-size: contain; + display: inline-block; } diff --git a/doxygen/hdf5doxy_layout.xml b/doxygen/hdf5doxy_layout.xml index 24642b5..20e0123 100644 --- a/doxygen/hdf5doxy_layout.xml +++ b/doxygen/hdf5doxy_layout.xml @@ -5,6 +5,7 @@ + diff --git a/doxygen/img/Dmodel_fig1.gif b/doxygen/img/Dmodel_fig1.gif new file mode 100644 index 0000000..ca8093c Binary files /dev/null and b/doxygen/img/Dmodel_fig1.gif differ diff --git a/doxygen/img/Dmodel_fig10.gif b/doxygen/img/Dmodel_fig10.gif new file mode 100644 index 0000000..c6a9916 Binary files /dev/null and b/doxygen/img/Dmodel_fig10.gif differ diff --git a/doxygen/img/Dmodel_fig11_b.gif b/doxygen/img/Dmodel_fig11_b.gif new file mode 100644 index 0000000..19ea9fb Binary files /dev/null and b/doxygen/img/Dmodel_fig11_b.gif differ diff --git a/doxygen/img/Dmodel_fig12_a.gif b/doxygen/img/Dmodel_fig12_a.gif new file mode 100644 index 0000000..1f597df Binary files /dev/null and b/doxygen/img/Dmodel_fig12_a.gif differ diff --git a/doxygen/img/Dmodel_fig12_b.gif b/doxygen/img/Dmodel_fig12_b.gif new file mode 100644 index 0000000..f271082 Binary files /dev/null and b/doxygen/img/Dmodel_fig12_b.gif differ diff --git a/doxygen/img/Dmodel_fig14_a.gif b/doxygen/img/Dmodel_fig14_a.gif new file mode 100644 index 0000000..45d6c6c Binary files /dev/null and b/doxygen/img/Dmodel_fig14_a.gif differ diff --git a/doxygen/img/Dmodel_fig14_b.gif b/doxygen/img/Dmodel_fig14_b.gif new file mode 100644 index 0000000..12a667d Binary files /dev/null and b/doxygen/img/Dmodel_fig14_b.gif differ diff --git a/doxygen/img/Dmodel_fig14_c.gif b/doxygen/img/Dmodel_fig14_c.gif new file mode 100644 index 0000000..0c06049 Binary files /dev/null and b/doxygen/img/Dmodel_fig14_c.gif differ diff --git a/doxygen/img/Dmodel_fig14_d.gif b/doxygen/img/Dmodel_fig14_d.gif new file mode 100644 index 0000000..7cb8956 Binary files /dev/null and b/doxygen/img/Dmodel_fig14_d.gif differ diff --git a/doxygen/img/Dmodel_fig2.gif b/doxygen/img/Dmodel_fig2.gif new file mode 100644 index 0000000..c2c9d04 Binary files /dev/null and b/doxygen/img/Dmodel_fig2.gif differ diff --git a/doxygen/img/Dmodel_fig3_a.gif b/doxygen/img/Dmodel_fig3_a.gif new file mode 100644 index 0000000..9f00832 Binary files /dev/null and b/doxygen/img/Dmodel_fig3_a.gif differ diff --git a/doxygen/img/Dmodel_fig3_c.gif 
b/doxygen/img/Dmodel_fig3_c.gif new file mode 100644 index 0000000..8529181 Binary files /dev/null and b/doxygen/img/Dmodel_fig3_c.gif differ diff --git a/doxygen/img/Dmodel_fig4_a.gif b/doxygen/img/Dmodel_fig4_a.gif new file mode 100644 index 0000000..c7fdce1 Binary files /dev/null and b/doxygen/img/Dmodel_fig4_a.gif differ diff --git a/doxygen/img/Dmodel_fig4_b.gif b/doxygen/img/Dmodel_fig4_b.gif new file mode 100644 index 0000000..34053d5 Binary files /dev/null and b/doxygen/img/Dmodel_fig4_b.gif differ diff --git a/doxygen/img/Dmodel_fig5.gif b/doxygen/img/Dmodel_fig5.gif new file mode 100644 index 0000000..69e11f5 Binary files /dev/null and b/doxygen/img/Dmodel_fig5.gif differ diff --git a/doxygen/img/Dmodel_fig6.gif b/doxygen/img/Dmodel_fig6.gif new file mode 100644 index 0000000..bf677c2 Binary files /dev/null and b/doxygen/img/Dmodel_fig6.gif differ diff --git a/doxygen/img/Dmodel_fig7_b.gif b/doxygen/img/Dmodel_fig7_b.gif new file mode 100644 index 0000000..da27fa0 Binary files /dev/null and b/doxygen/img/Dmodel_fig7_b.gif differ diff --git a/doxygen/img/Dmodel_fig8.gif b/doxygen/img/Dmodel_fig8.gif new file mode 100644 index 0000000..27305a8 Binary files /dev/null and b/doxygen/img/Dmodel_fig8.gif differ diff --git a/doxygen/img/Dmodel_fig9.gif b/doxygen/img/Dmodel_fig9.gif new file mode 100644 index 0000000..31893bf Binary files /dev/null and b/doxygen/img/Dmodel_fig9.gif differ diff --git a/doxygen/img/Dsets_NbitFloating1.gif b/doxygen/img/Dsets_NbitFloating1.gif new file mode 100644 index 0000000..3d3ce19 Binary files /dev/null and b/doxygen/img/Dsets_NbitFloating1.gif differ diff --git a/doxygen/img/Dsets_NbitFloating2.gif b/doxygen/img/Dsets_NbitFloating2.gif new file mode 100644 index 0000000..cdb5a90 Binary files /dev/null and b/doxygen/img/Dsets_NbitFloating2.gif differ diff --git a/doxygen/img/Dsets_NbitInteger1.gif b/doxygen/img/Dsets_NbitInteger1.gif new file mode 100644 index 0000000..656fb8d Binary files /dev/null and b/doxygen/img/Dsets_NbitInteger1.gif differ diff --git a/doxygen/img/Dsets_NbitInteger2.gif b/doxygen/img/Dsets_NbitInteger2.gif new file mode 100644 index 0000000..e100ebe Binary files /dev/null and b/doxygen/img/Dsets_NbitInteger2.gif differ diff --git a/doxygen/img/Dsets_fig1.gif b/doxygen/img/Dsets_fig1.gif new file mode 100644 index 0000000..c8f3349 Binary files /dev/null and b/doxygen/img/Dsets_fig1.gif differ diff --git a/doxygen/img/Dsets_fig10.gif b/doxygen/img/Dsets_fig10.gif new file mode 100644 index 0000000..4593cc1 Binary files /dev/null and b/doxygen/img/Dsets_fig10.gif differ diff --git a/doxygen/img/Dsets_fig11.gif b/doxygen/img/Dsets_fig11.gif new file mode 100644 index 0000000..573701a Binary files /dev/null and b/doxygen/img/Dsets_fig11.gif differ diff --git a/doxygen/img/Dsets_fig12.gif b/doxygen/img/Dsets_fig12.gif new file mode 100644 index 0000000..d9ddd2b Binary files /dev/null and b/doxygen/img/Dsets_fig12.gif differ diff --git a/doxygen/img/Dsets_fig2.gif b/doxygen/img/Dsets_fig2.gif new file mode 100644 index 0000000..8ecc2c7 Binary files /dev/null and b/doxygen/img/Dsets_fig2.gif differ diff --git a/doxygen/img/Dsets_fig3.gif b/doxygen/img/Dsets_fig3.gif new file mode 100644 index 0000000..642715e Binary files /dev/null and b/doxygen/img/Dsets_fig3.gif differ diff --git a/doxygen/img/Dsets_fig4.gif b/doxygen/img/Dsets_fig4.gif new file mode 100644 index 0000000..a24ccc9 Binary files /dev/null and b/doxygen/img/Dsets_fig4.gif differ diff --git a/doxygen/img/Dsets_fig5.gif b/doxygen/img/Dsets_fig5.gif new file mode 100644 
index 0000000..78c953e Binary files /dev/null and b/doxygen/img/Dsets_fig5.gif differ diff --git a/doxygen/img/Dsets_fig6.gif b/doxygen/img/Dsets_fig6.gif new file mode 100644 index 0000000..ea15564 Binary files /dev/null and b/doxygen/img/Dsets_fig6.gif differ diff --git a/doxygen/img/Dsets_fig7.gif b/doxygen/img/Dsets_fig7.gif new file mode 100644 index 0000000..f7f6b9e Binary files /dev/null and b/doxygen/img/Dsets_fig7.gif differ diff --git a/doxygen/img/Dsets_fig8.gif b/doxygen/img/Dsets_fig8.gif new file mode 100644 index 0000000..91cb6aa Binary files /dev/null and b/doxygen/img/Dsets_fig8.gif differ diff --git a/doxygen/img/Dsets_fig9.gif b/doxygen/img/Dsets_fig9.gif new file mode 100644 index 0000000..802ca52 Binary files /dev/null and b/doxygen/img/Dsets_fig9.gif differ diff --git a/doxygen/img/Dspace_CvsF1.gif b/doxygen/img/Dspace_CvsF1.gif new file mode 100644 index 0000000..716b9f1 Binary files /dev/null and b/doxygen/img/Dspace_CvsF1.gif differ diff --git a/doxygen/img/Dspace_CvsF2.gif b/doxygen/img/Dspace_CvsF2.gif new file mode 100644 index 0000000..716b9f1 Binary files /dev/null and b/doxygen/img/Dspace_CvsF2.gif differ diff --git a/doxygen/img/Dspace_CvsF3.gif b/doxygen/img/Dspace_CvsF3.gif new file mode 100644 index 0000000..59c31ff Binary files /dev/null and b/doxygen/img/Dspace_CvsF3.gif differ diff --git a/doxygen/img/Dspace_CvsF4.gif b/doxygen/img/Dspace_CvsF4.gif new file mode 100644 index 0000000..e97b006 Binary files /dev/null and b/doxygen/img/Dspace_CvsF4.gif differ diff --git a/doxygen/img/Dspace_combine.gif b/doxygen/img/Dspace_combine.gif new file mode 100644 index 0000000..8da2397 Binary files /dev/null and b/doxygen/img/Dspace_combine.gif differ diff --git a/doxygen/img/Dspace_complex.gif b/doxygen/img/Dspace_complex.gif new file mode 100644 index 0000000..53e92ee Binary files /dev/null and b/doxygen/img/Dspace_complex.gif differ diff --git a/doxygen/img/Dspace_features.gif b/doxygen/img/Dspace_features.gif new file mode 100644 index 0000000..d94b4e4 Binary files /dev/null and b/doxygen/img/Dspace_features.gif differ diff --git a/doxygen/img/Dspace_features_cmpd.gif b/doxygen/img/Dspace_features_cmpd.gif new file mode 100644 index 0000000..f24ee99 Binary files /dev/null and b/doxygen/img/Dspace_features_cmpd.gif differ diff --git a/doxygen/img/Dspace_move.gif b/doxygen/img/Dspace_move.gif new file mode 100644 index 0000000..5debd75 Binary files /dev/null and b/doxygen/img/Dspace_move.gif differ diff --git a/doxygen/img/Dspace_point.gif b/doxygen/img/Dspace_point.gif new file mode 100644 index 0000000..92ad3a8 Binary files /dev/null and b/doxygen/img/Dspace_point.gif differ diff --git a/doxygen/img/Dspace_read.gif b/doxygen/img/Dspace_read.gif new file mode 100644 index 0000000..28c67f4 Binary files /dev/null and b/doxygen/img/Dspace_read.gif differ diff --git a/doxygen/img/Dspace_select.gif b/doxygen/img/Dspace_select.gif new file mode 100644 index 0000000..b9f4851 Binary files /dev/null and b/doxygen/img/Dspace_select.gif differ diff --git a/doxygen/img/Dspace_separate.gif b/doxygen/img/Dspace_separate.gif new file mode 100644 index 0000000..ba4ba8c Binary files /dev/null and b/doxygen/img/Dspace_separate.gif differ diff --git a/doxygen/img/Dspace_simple.gif b/doxygen/img/Dspace_simple.gif new file mode 100644 index 0000000..ff3eca5 Binary files /dev/null and b/doxygen/img/Dspace_simple.gif differ diff --git a/doxygen/img/Dspace_subset.gif b/doxygen/img/Dspace_subset.gif new file mode 100644 index 0000000..b353175 Binary files /dev/null and 
b/doxygen/img/Dspace_subset.gif differ diff --git a/doxygen/img/Dspace_three_datasets.gif b/doxygen/img/Dspace_three_datasets.gif new file mode 100644 index 0000000..4af222f Binary files /dev/null and b/doxygen/img/Dspace_three_datasets.gif differ diff --git a/doxygen/img/Dspace_transfer.gif b/doxygen/img/Dspace_transfer.gif new file mode 100644 index 0000000..7de0231 Binary files /dev/null and b/doxygen/img/Dspace_transfer.gif differ diff --git a/doxygen/img/Dspace_write1to2.gif b/doxygen/img/Dspace_write1to2.gif new file mode 100644 index 0000000..5735bc7 Binary files /dev/null and b/doxygen/img/Dspace_write1to2.gif differ diff --git a/doxygen/img/Dtypes_fig1.gif b/doxygen/img/Dtypes_fig1.gif new file mode 100644 index 0000000..484f54f Binary files /dev/null and b/doxygen/img/Dtypes_fig1.gif differ diff --git a/doxygen/img/Dtypes_fig10.gif b/doxygen/img/Dtypes_fig10.gif new file mode 100644 index 0000000..60c8ba9 Binary files /dev/null and b/doxygen/img/Dtypes_fig10.gif differ diff --git a/doxygen/img/Dtypes_fig11.gif b/doxygen/img/Dtypes_fig11.gif new file mode 100644 index 0000000..b5eda71 Binary files /dev/null and b/doxygen/img/Dtypes_fig11.gif differ diff --git a/doxygen/img/Dtypes_fig12.gif b/doxygen/img/Dtypes_fig12.gif new file mode 100644 index 0000000..ee911b7 Binary files /dev/null and b/doxygen/img/Dtypes_fig12.gif differ diff --git a/doxygen/img/Dtypes_fig13a.gif b/doxygen/img/Dtypes_fig13a.gif new file mode 100644 index 0000000..2f47b71 Binary files /dev/null and b/doxygen/img/Dtypes_fig13a.gif differ diff --git a/doxygen/img/Dtypes_fig13b.gif b/doxygen/img/Dtypes_fig13b.gif new file mode 100644 index 0000000..fe3b5fb Binary files /dev/null and b/doxygen/img/Dtypes_fig13b.gif differ diff --git a/doxygen/img/Dtypes_fig13c.gif b/doxygen/img/Dtypes_fig13c.gif new file mode 100644 index 0000000..afd2834 Binary files /dev/null and b/doxygen/img/Dtypes_fig13c.gif differ diff --git a/doxygen/img/Dtypes_fig13d.gif b/doxygen/img/Dtypes_fig13d.gif new file mode 100644 index 0000000..48805d8 Binary files /dev/null and b/doxygen/img/Dtypes_fig13d.gif differ diff --git a/doxygen/img/Dtypes_fig14.gif b/doxygen/img/Dtypes_fig14.gif new file mode 100644 index 0000000..8f4d787 Binary files /dev/null and b/doxygen/img/Dtypes_fig14.gif differ diff --git a/doxygen/img/Dtypes_fig15.gif b/doxygen/img/Dtypes_fig15.gif new file mode 100644 index 0000000..82a34d0 Binary files /dev/null and b/doxygen/img/Dtypes_fig15.gif differ diff --git a/doxygen/img/Dtypes_fig16.gif b/doxygen/img/Dtypes_fig16.gif new file mode 100644 index 0000000..e83d379 Binary files /dev/null and b/doxygen/img/Dtypes_fig16.gif differ diff --git a/doxygen/img/Dtypes_fig16a.gif b/doxygen/img/Dtypes_fig16a.gif new file mode 100644 index 0000000..7e68cc0 Binary files /dev/null and b/doxygen/img/Dtypes_fig16a.gif differ diff --git a/doxygen/img/Dtypes_fig16b.gif b/doxygen/img/Dtypes_fig16b.gif new file mode 100644 index 0000000..b7919be Binary files /dev/null and b/doxygen/img/Dtypes_fig16b.gif differ diff --git a/doxygen/img/Dtypes_fig16c.gif b/doxygen/img/Dtypes_fig16c.gif new file mode 100644 index 0000000..cca285a Binary files /dev/null and b/doxygen/img/Dtypes_fig16c.gif differ diff --git a/doxygen/img/Dtypes_fig16d.gif b/doxygen/img/Dtypes_fig16d.gif new file mode 100644 index 0000000..8ca0fd7 Binary files /dev/null and b/doxygen/img/Dtypes_fig16d.gif differ diff --git a/doxygen/img/Dtypes_fig17a.gif b/doxygen/img/Dtypes_fig17a.gif new file mode 100644 index 0000000..cdfaa29 Binary files /dev/null and 
b/doxygen/img/Dtypes_fig17a.gif differ diff --git a/doxygen/img/Dtypes_fig17b.gif b/doxygen/img/Dtypes_fig17b.gif new file mode 100644 index 0000000..4a3ba33 Binary files /dev/null and b/doxygen/img/Dtypes_fig17b.gif differ diff --git a/doxygen/img/Dtypes_fig18.gif b/doxygen/img/Dtypes_fig18.gif new file mode 100644 index 0000000..73c33e0 Binary files /dev/null and b/doxygen/img/Dtypes_fig18.gif differ diff --git a/doxygen/img/Dtypes_fig19.gif b/doxygen/img/Dtypes_fig19.gif new file mode 100644 index 0000000..38ea6d4 Binary files /dev/null and b/doxygen/img/Dtypes_fig19.gif differ diff --git a/doxygen/img/Dtypes_fig2.gif b/doxygen/img/Dtypes_fig2.gif new file mode 100644 index 0000000..52285a6 Binary files /dev/null and b/doxygen/img/Dtypes_fig2.gif differ diff --git a/doxygen/img/Dtypes_fig20a.gif b/doxygen/img/Dtypes_fig20a.gif new file mode 100644 index 0000000..8406e77 Binary files /dev/null and b/doxygen/img/Dtypes_fig20a.gif differ diff --git a/doxygen/img/Dtypes_fig20b.gif b/doxygen/img/Dtypes_fig20b.gif new file mode 100644 index 0000000..3f2331d Binary files /dev/null and b/doxygen/img/Dtypes_fig20b.gif differ diff --git a/doxygen/img/Dtypes_fig20c.gif b/doxygen/img/Dtypes_fig20c.gif new file mode 100644 index 0000000..5b60165 Binary files /dev/null and b/doxygen/img/Dtypes_fig20c.gif differ diff --git a/doxygen/img/Dtypes_fig20d.gif b/doxygen/img/Dtypes_fig20d.gif new file mode 100644 index 0000000..fdcb59a Binary files /dev/null and b/doxygen/img/Dtypes_fig20d.gif differ diff --git a/doxygen/img/Dtypes_fig21.gif b/doxygen/img/Dtypes_fig21.gif new file mode 100644 index 0000000..6d30528 Binary files /dev/null and b/doxygen/img/Dtypes_fig21.gif differ diff --git a/doxygen/img/Dtypes_fig22.gif b/doxygen/img/Dtypes_fig22.gif new file mode 100644 index 0000000..5e2ca99 Binary files /dev/null and b/doxygen/img/Dtypes_fig22.gif differ diff --git a/doxygen/img/Dtypes_fig23.gif b/doxygen/img/Dtypes_fig23.gif new file mode 100644 index 0000000..f0c9882 Binary files /dev/null and b/doxygen/img/Dtypes_fig23.gif differ diff --git a/doxygen/img/Dtypes_fig24.gif b/doxygen/img/Dtypes_fig24.gif new file mode 100644 index 0000000..a1c28f4 Binary files /dev/null and b/doxygen/img/Dtypes_fig24.gif differ diff --git a/doxygen/img/Dtypes_fig25a.gif b/doxygen/img/Dtypes_fig25a.gif new file mode 100644 index 0000000..16d3bcc Binary files /dev/null and b/doxygen/img/Dtypes_fig25a.gif differ diff --git a/doxygen/img/Dtypes_fig25c.gif b/doxygen/img/Dtypes_fig25c.gif new file mode 100644 index 0000000..a625b74 Binary files /dev/null and b/doxygen/img/Dtypes_fig25c.gif differ diff --git a/doxygen/img/Dtypes_fig26.gif b/doxygen/img/Dtypes_fig26.gif new file mode 100644 index 0000000..24b34fb Binary files /dev/null and b/doxygen/img/Dtypes_fig26.gif differ diff --git a/doxygen/img/Dtypes_fig27.gif b/doxygen/img/Dtypes_fig27.gif new file mode 100644 index 0000000..71f182a Binary files /dev/null and b/doxygen/img/Dtypes_fig27.gif differ diff --git a/doxygen/img/Dtypes_fig28.gif b/doxygen/img/Dtypes_fig28.gif new file mode 100644 index 0000000..56d8d1b Binary files /dev/null and b/doxygen/img/Dtypes_fig28.gif differ diff --git a/doxygen/img/Dtypes_fig3.gif b/doxygen/img/Dtypes_fig3.gif new file mode 100644 index 0000000..993d12e Binary files /dev/null and b/doxygen/img/Dtypes_fig3.gif differ diff --git a/doxygen/img/Dtypes_fig4.gif b/doxygen/img/Dtypes_fig4.gif new file mode 100644 index 0000000..67aedef Binary files /dev/null and b/doxygen/img/Dtypes_fig4.gif differ diff --git a/doxygen/img/Dtypes_fig5.gif 
b/doxygen/img/Dtypes_fig5.gif new file mode 100644 index 0000000..075417d Binary files /dev/null and b/doxygen/img/Dtypes_fig5.gif differ diff --git a/doxygen/img/Dtypes_fig6.gif b/doxygen/img/Dtypes_fig6.gif new file mode 100644 index 0000000..516ab95 Binary files /dev/null and b/doxygen/img/Dtypes_fig6.gif differ diff --git a/doxygen/img/Dtypes_fig7.gif b/doxygen/img/Dtypes_fig7.gif new file mode 100644 index 0000000..c18e9dc Binary files /dev/null and b/doxygen/img/Dtypes_fig7.gif differ diff --git a/doxygen/img/Dtypes_fig8.gif b/doxygen/img/Dtypes_fig8.gif new file mode 100644 index 0000000..d75d998 Binary files /dev/null and b/doxygen/img/Dtypes_fig8.gif differ diff --git a/doxygen/img/Dtypes_fig9.gif b/doxygen/img/Dtypes_fig9.gif new file mode 100644 index 0000000..873f0ab Binary files /dev/null and b/doxygen/img/Dtypes_fig9.gif differ diff --git a/doxygen/img/Files_fig3.gif b/doxygen/img/Files_fig3.gif new file mode 100644 index 0000000..6912f5c Binary files /dev/null and b/doxygen/img/Files_fig3.gif differ diff --git a/doxygen/img/Files_fig4.gif b/doxygen/img/Files_fig4.gif new file mode 100644 index 0000000..b4ff107 Binary files /dev/null and b/doxygen/img/Files_fig4.gif differ diff --git a/doxygen/img/Groups_fig1.gif b/doxygen/img/Groups_fig1.gif new file mode 100644 index 0000000..193fff9 Binary files /dev/null and b/doxygen/img/Groups_fig1.gif differ diff --git a/doxygen/img/Groups_fig10_a.gif b/doxygen/img/Groups_fig10_a.gif new file mode 100644 index 0000000..6595b34 Binary files /dev/null and b/doxygen/img/Groups_fig10_a.gif differ diff --git a/doxygen/img/Groups_fig10_b.gif b/doxygen/img/Groups_fig10_b.gif new file mode 100644 index 0000000..9e7c234 Binary files /dev/null and b/doxygen/img/Groups_fig10_b.gif differ diff --git a/doxygen/img/Groups_fig10_c.gif b/doxygen/img/Groups_fig10_c.gif new file mode 100644 index 0000000..20900ac Binary files /dev/null and b/doxygen/img/Groups_fig10_c.gif differ diff --git a/doxygen/img/Groups_fig10_d.gif b/doxygen/img/Groups_fig10_d.gif new file mode 100644 index 0000000..7251919 Binary files /dev/null and b/doxygen/img/Groups_fig10_d.gif differ diff --git a/doxygen/img/Groups_fig11_a.gif b/doxygen/img/Groups_fig11_a.gif new file mode 100644 index 0000000..1d041d0 Binary files /dev/null and b/doxygen/img/Groups_fig11_a.gif differ diff --git a/doxygen/img/Groups_fig11_b.gif b/doxygen/img/Groups_fig11_b.gif new file mode 100644 index 0000000..732109b Binary files /dev/null and b/doxygen/img/Groups_fig11_b.gif differ diff --git a/doxygen/img/Groups_fig11_c.gif b/doxygen/img/Groups_fig11_c.gif new file mode 100644 index 0000000..f1444eb Binary files /dev/null and b/doxygen/img/Groups_fig11_c.gif differ diff --git a/doxygen/img/Groups_fig11_d.gif b/doxygen/img/Groups_fig11_d.gif new file mode 100644 index 0000000..ee1b740 Binary files /dev/null and b/doxygen/img/Groups_fig11_d.gif differ diff --git a/doxygen/img/Groups_fig2.gif b/doxygen/img/Groups_fig2.gif new file mode 100644 index 0000000..d14b0ff Binary files /dev/null and b/doxygen/img/Groups_fig2.gif differ diff --git a/doxygen/img/Groups_fig3.gif b/doxygen/img/Groups_fig3.gif new file mode 100644 index 0000000..aaa1fe7 Binary files /dev/null and b/doxygen/img/Groups_fig3.gif differ diff --git a/doxygen/img/Groups_fig4.gif b/doxygen/img/Groups_fig4.gif new file mode 100644 index 0000000..a077bf3 Binary files /dev/null and b/doxygen/img/Groups_fig4.gif differ diff --git a/doxygen/img/Groups_fig5.gif b/doxygen/img/Groups_fig5.gif new file mode 100644 index 0000000..55ddc3c Binary files 
/dev/null and b/doxygen/img/Groups_fig5.gif differ diff --git a/doxygen/img/Groups_fig6.gif b/doxygen/img/Groups_fig6.gif new file mode 100644 index 0000000..53a18d4 Binary files /dev/null and b/doxygen/img/Groups_fig6.gif differ diff --git a/doxygen/img/Groups_fig9_a.gif b/doxygen/img/Groups_fig9_a.gif new file mode 100644 index 0000000..af0ab69 Binary files /dev/null and b/doxygen/img/Groups_fig9_a.gif differ diff --git a/doxygen/img/Groups_fig9_aa.gif b/doxygen/img/Groups_fig9_aa.gif new file mode 100644 index 0000000..43ed356 Binary files /dev/null and b/doxygen/img/Groups_fig9_aa.gif differ diff --git a/doxygen/img/Groups_fig9_b.gif b/doxygen/img/Groups_fig9_b.gif new file mode 100644 index 0000000..b07ec9c Binary files /dev/null and b/doxygen/img/Groups_fig9_b.gif differ diff --git a/doxygen/img/Groups_fig9_bb.gif b/doxygen/img/Groups_fig9_bb.gif new file mode 100644 index 0000000..e13f534 Binary files /dev/null and b/doxygen/img/Groups_fig9_bb.gif differ diff --git a/doxygen/img/Pmodel_fig2.gif b/doxygen/img/Pmodel_fig2.gif new file mode 100644 index 0000000..8be15fb Binary files /dev/null and b/doxygen/img/Pmodel_fig2.gif differ diff --git a/doxygen/img/Pmodel_fig3.gif b/doxygen/img/Pmodel_fig3.gif new file mode 100644 index 0000000..211f2ab Binary files /dev/null and b/doxygen/img/Pmodel_fig3.gif differ diff --git a/doxygen/img/Pmodel_fig5_a.gif b/doxygen/img/Pmodel_fig5_a.gif new file mode 100644 index 0000000..6607b1c Binary files /dev/null and b/doxygen/img/Pmodel_fig5_a.gif differ diff --git a/doxygen/img/Pmodel_fig5_b.gif b/doxygen/img/Pmodel_fig5_b.gif new file mode 100644 index 0000000..548df28 Binary files /dev/null and b/doxygen/img/Pmodel_fig5_b.gif differ diff --git a/doxygen/img/Pmodel_fig5_c.gif b/doxygen/img/Pmodel_fig5_c.gif new file mode 100644 index 0000000..459bc66 Binary files /dev/null and b/doxygen/img/Pmodel_fig5_c.gif differ diff --git a/doxygen/img/Pmodel_fig5_d.gif b/doxygen/img/Pmodel_fig5_d.gif new file mode 100644 index 0000000..207350d Binary files /dev/null and b/doxygen/img/Pmodel_fig5_d.gif differ diff --git a/doxygen/img/Pmodel_fig5_e.gif b/doxygen/img/Pmodel_fig5_e.gif new file mode 100644 index 0000000..ee4f656 Binary files /dev/null and b/doxygen/img/Pmodel_fig5_e.gif differ diff --git a/doxygen/img/Pmodel_fig6.gif b/doxygen/img/Pmodel_fig6.gif new file mode 100644 index 0000000..2dac825 Binary files /dev/null and b/doxygen/img/Pmodel_fig6.gif differ diff --git a/doxygen/img/PropListClassInheritance.gif b/doxygen/img/PropListClassInheritance.gif new file mode 100644 index 0000000..c6f0309 Binary files /dev/null and b/doxygen/img/PropListClassInheritance.gif differ diff --git a/doxygen/img/PropListEcosystem.gif b/doxygen/img/PropListEcosystem.gif new file mode 100644 index 0000000..cf77ba4 Binary files /dev/null and b/doxygen/img/PropListEcosystem.gif differ diff --git a/doxygen/img/Shared_Attribute.jpg b/doxygen/img/Shared_Attribute.jpg new file mode 100644 index 0000000..058eeec Binary files /dev/null and b/doxygen/img/Shared_Attribute.jpg differ diff --git a/doxygen/img/UML_Attribute.jpg b/doxygen/img/UML_Attribute.jpg new file mode 100644 index 0000000..5b3db7d Binary files /dev/null and b/doxygen/img/UML_Attribute.jpg differ diff --git a/doxygen/img/UML_FileAndProps.gif b/doxygen/img/UML_FileAndProps.gif new file mode 100644 index 0000000..1de96c6 Binary files /dev/null and b/doxygen/img/UML_FileAndProps.gif differ diff --git a/doxygen/img/VFL_Drivers.gif b/doxygen/img/VFL_Drivers.gif new file mode 100644 index 0000000..4b626c6 Binary files 
/dev/null and b/doxygen/img/VFL_Drivers.gif differ diff --git a/doxygen/img/dtypes_fig25b.gif b/doxygen/img/dtypes_fig25b.gif new file mode 100644 index 0000000..9dbc225 Binary files /dev/null and b/doxygen/img/dtypes_fig25b.gif differ diff --git a/hl/src/H5LDpublic.h b/hl/src/H5LDpublic.h index e42c8d8..611fcc5 100644 --- a/hl/src/H5LDpublic.h +++ b/hl/src/H5LDpublic.h @@ -34,7 +34,7 @@ extern "C" { * It will return failure if \p cur_dims is NULL. * * \note See Also: - * \note Dataset Watch functions (used with \ref h5watch): + * \note Dataset Watch functions (used with h5watch): * - H5LDget_dset_dims() * - H5LDget_dset_elmts() * - H5LDget_dset_type_size() @@ -71,7 +71,7 @@ H5_HLDLL herr_t H5LDget_dset_dims(hid_t did, hsize_t *cur_dims); * conflict with these two separators. * * \note See Also: - * \note Dataset Watch functions (used with \ref h5watch): + * \note Dataset Watch functions (used with h5watch): * - H5LDget_dset_dims() * - H5LDget_dset_elmts() * - H5LDget_dset_type_size() @@ -123,7 +123,7 @@ H5_HLDLL size_t H5LDget_dset_type_size(hid_t did, const char *fields); * two separators. * * \note See Also: - * \note Dataset Watch functions (used with \ref h5watch): + * \note Dataset Watch functions (used with h5watch): * - H5LDget_dset_dims() * - H5LDget_dset_elmts() * - H5LDget_dset_type_size() diff --git a/hl/src/H5LTpublic.h b/hl/src/H5LTpublic.h index 53cd38e..6e25afa 100644 --- a/hl/src/H5LTpublic.h +++ b/hl/src/H5LTpublic.h @@ -60,89 +60,89 @@ extern "C" { * * - Dataset Functions * - Make dataset functions - * - \ref H5LTmake_dataset - * - \ref H5LTmake_dataset_char - * - \ref H5LTmake_dataset_short - * - \ref H5LTmake_dataset_int - * - \ref H5LTmake_dataset_long - * - \ref H5LTmake_dataset_float - * - \ref H5LTmake_dataset_double - * - \ref H5LTmake_dataset_string + * - \ref H5LTmake_dataset + * - \ref H5LTmake_dataset_char + * - \ref H5LTmake_dataset_short + * - \ref H5LTmake_dataset_int + * - \ref H5LTmake_dataset_long + * - \ref H5LTmake_dataset_float + * - \ref H5LTmake_dataset_double + * - \ref H5LTmake_dataset_string * * - Read dataset functions - * - \ref H5LTread_dataset - * - \ref H5LTread_dataset_char - * - \ref H5LTread_dataset_short - * - \ref H5LTread_dataset_int - * - \ref H5LTread_dataset_long - * - \ref H5LTread_dataset_float - * - \ref H5LTread_dataset_double - * - \ref H5LTread_dataset_string + * - \ref H5LTread_dataset + * - \ref H5LTread_dataset_char + * - \ref H5LTread_dataset_short + * - \ref H5LTread_dataset_int + * - \ref H5LTread_dataset_long + * - \ref H5LTread_dataset_float + * - \ref H5LTread_dataset_double + * - \ref H5LTread_dataset_string * * - Query dataset functions - * - \ref H5LTfind_dataset - * - \ref H5LTget_dataset_ndims - * - \ref H5LTget_dataset_info + * - \ref H5LTfind_dataset + * - \ref H5LTget_dataset_ndims + * - \ref H5LTget_dataset_info * * - Dataset watch functions - * - \ref H5LDget_dset_dims - * - \ref H5LDget_dset_elmts - * - \ref H5LDget_dset_type_size + * - \ref H5LDget_dset_dims + * - \ref H5LDget_dset_elmts + * - \ref H5LDget_dset_type_size * *
* * - Attribute Functions * - Set attribute functions - * - \ref H5LTset_attribute_string - * - \ref H5LTset_attribute_char - * - \ref H5LTset_attribute_uchar - * - \ref H5LTset_attribute_short - * - \ref H5LTset_attribute_ushort - * - \ref H5LTset_attribute_int - * - \ref H5LTset_attribute_uint - * - \ref H5LTset_attribute_long - * - \ref H5LTset_attribute_long_long - * - \ref H5LTset_attribute_ulong - * - \ref H5LTset_attribute_ullong - * - \ref H5LTset_attribute_float - * - \ref H5LTset_attribute_double - * - H5LTset_attribute_f (fortran ONLY) + * - \ref H5LTset_attribute_string + * - \ref H5LTset_attribute_char + * - \ref H5LTset_attribute_uchar + * - \ref H5LTset_attribute_short + * - \ref H5LTset_attribute_ushort + * - \ref H5LTset_attribute_int + * - \ref H5LTset_attribute_uint + * - \ref H5LTset_attribute_long + * - \ref H5LTset_attribute_long_long + * - \ref H5LTset_attribute_ulong + * - \ref H5LTset_attribute_ullong + * - \ref H5LTset_attribute_float + * - \ref H5LTset_attribute_double + * - H5LTset_attribute_f (fortran ONLY) * * - Get attribute functions - * - \ref H5LTget_attribute - * - \ref H5LTget_attribute_string - * - \ref H5LTget_attribute_char - * - \ref H5LTget_attribute_uchar - * - \ref H5LTget_attribute_short - * - \ref H5LTget_attribute_ushort - * - \ref H5LTget_attribute_int - * - \ref H5LTget_attribute_uint - * - \ref H5LTget_attribute_long - * - \ref H5LTget_attribute_long_long - * - \ref H5LTget_attribute_ulong - * - \ref H5LTget_attribute_ullong - * - \ref H5LTget_attribute_float - * - \ref H5LTget_attribute_double + * - \ref H5LTget_attribute + * - \ref H5LTget_attribute_string + * - \ref H5LTget_attribute_char + * - \ref H5LTget_attribute_uchar + * - \ref H5LTget_attribute_short + * - \ref H5LTget_attribute_ushort + * - \ref H5LTget_attribute_int + * - \ref H5LTget_attribute_uint + * - \ref H5LTget_attribute_long + * - \ref H5LTget_attribute_long_long + * - \ref H5LTget_attribute_ulong + * - \ref H5LTget_attribute_ullong + * - \ref H5LTget_attribute_float + * - \ref H5LTget_attribute_double * * - Query attribute functions - * - \ref H5LTfind_attribute - * - \ref H5LTget_attribute_info - * - \ref H5LTget_attribute_ndims + * - \ref H5LTfind_attribute + * - \ref H5LTget_attribute_info + * - \ref H5LTget_attribute_ndims * * * * - Datatype Functions * - Datatype translation functions - * - \ref H5LTtext_to_dtype - * - \ref H5LTdtype_to_text + * - \ref H5LTtext_to_dtype + * - \ref H5LTdtype_to_text * * - File image function * - Open file image function - * - \ref H5LTopen_file_image + * - \ref H5LTopen_file_image * * - Path and object function * - Query path and object function - * - \ref H5LTpath_valid + * - \ref H5LTpath_valid * *
@@ -1516,8 +1516,7 @@ H5_HLDLL herr_t H5LTfind_attribute(hid_t loc_id, const char *name); * indicating the file’s root group, followed by the members * - A relative path with respect to \p loc_id * - A dot (\c .), if \p loc_id is the object identifier for - * the object itself - * . + * the object itself. * * If \p path is an absolute path, then \p loc_id can be an * identifier for any object in the file as it is used only to diff --git a/java/examples/groups/H5Ex_G_Visit.java b/java/examples/groups/H5Ex_G_Visit.java index d14ded6..1f2f9a1 100644 --- a/java/examples/groups/H5Ex_G_Visit.java +++ b/java/examples/groups/H5Ex_G_Visit.java @@ -15,7 +15,7 @@ using H5Ovisit and H5Lvisit. The program prints all of the objects in the file specified in FILE, then prints all of the links in that file. The default file used by this - example implements the structure described in the User's + example implements the structure described in the User Guide, chapter 4, figure 26. ************************************************************/ package examples.groups; diff --git a/java/src/Makefile.am b/java/src/Makefile.am index 0fe5302..8d9182d 100644 --- a/java/src/Makefile.am +++ b/java/src/Makefile.am @@ -108,7 +108,6 @@ hdf5_java_JAVA = \ ${pkgpath}/structs/H5AC_cache_config_t.java \ ${pkgpath}/H5.java \ ${pkgpath}/HDF5Constants.java \ - ${pkgpath}/HDF5GroupInfo.java \ ${pkgpath}/HDFArray.java \ ${pkgpath}/HDFNativeData.java @@ -124,7 +123,7 @@ DOCTITLE = '

HDF5 Java Wrapper

' SRCDIR = '$(pkgpath)' docs: - $(JAVADOC) -sourcepath $(srcdir) -d javadoc -use -splitIndex -windowtitle $(WINDOWTITLE) -doctitle $(DOCTITLE) -J-Xmx180m -verbose -overview $(top_srcdir)/java/src/hdf/overview.html -classpath $(CLASSPATH_ENV) hdf.hdf5lib + $(JAVADOC) -sourcepath $(srcdir) -d javadoc -Xdoclint:none -use -splitIndex -windowtitle $(WINDOWTITLE) -doctitle $(DOCTITLE) -J-Xmx180m -verbose -overview $(top_srcdir)/java/src/hdf/overview.html -classpath $(CLASSPATH_ENV) hdf.hdf5lib CLEANFILES = classhdf5_java.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/callbacks/*.class $(JAVAROOT)/$(pkgpath)/exceptions/*.class $(JAVAROOT)/$(pkgpath)/structs/*.class $(JAVAROOT)/$(pkgpath)/*.class diff --git a/java/src/hdf/hdf5lib/CMakeLists.txt b/java/src/hdf/hdf5lib/CMakeLists.txt index eddc934..9247197 100644 --- a/java/src/hdf/hdf5lib/CMakeLists.txt +++ b/java/src/hdf/hdf5lib/CMakeLists.txt @@ -101,7 +101,6 @@ set (HDF5_JAVADOC_HDF_HDF5_STRUCTS_SOURCES set (HDF5_JAVA_HDF_HDF5_SOURCES HDFArray.java HDF5Constants.java - HDF5GroupInfo.java HDFNativeData.java H5.java ) diff --git a/java/src/hdf/hdf5lib/H5.java b/java/src/hdf/hdf5lib/H5.java index 48662ef..80bb0f5 100644 --- a/java/src/hdf/hdf5lib/H5.java +++ b/java/src/hdf/hdf5lib/H5.java @@ -59,55 +59,55 @@ import hdf.hdf5lib.structs.H5O_native_info_t; import hdf.hdf5lib.structs.H5O_token_t; /** + * @page HDF5LIB HDF5 Java API Package * This class is the Java interface for the HDF5 library. *

 * This code is called by Java programs to access the entry points of the HDF5 library. Each routine wraps
 * a single HDF5 entry point, generally with the arguments and return codes analogous to the C interface.
 *

- * For details of the HDF5 library, see the HDF5 Documentation at: - * http://hdfgroup.org/HDF5/ + * For details of the HDF5 library, @see @ref RM *


*

* Mapping of arguments for Java * *

 * In general, arguments to the HDF Java API are straightforward translations from the 'C' API described in
- * the HDF Reference Manual.
+ * the @ref RM.
 *
 * HDF5 C types to Java types
 *
 *   @ref H5T_NATIVE_INT                  int, Integer
 *   @ref H5T_NATIVE_SHORT                short, Short
 *   @ref H5T_NATIVE_FLOAT                float, Float
 *   @ref H5T_NATIVE_DOUBLE               double, Double
 *   @ref H5T_NATIVE_CHAR                 byte, Byte
 *   @ref H5T_C_S1                        java.lang.String
 *   void * (i.e., pointer to `Any')      Special -- see @ref HDFARRAY
* General Rules for Passing Arguments and Results @@ -116,17 +116,17 @@ import hdf.hdf5lib.structs.H5O_token_t; * for arrays, which are discussed below. *

* The return value of Java methods is also the analogous type, as above. A major exception to that - * rule is that all HDF functions that return SUCCEED/FAIL are declared boolean in the Java version, - * rather than int as in the C. Functions that return a value or else FAIL are declared the + * rule is that all HDF Java functions will raise an exception upon failure in the Java version, + * rather than just return int as in the C. Functions that return a value are declared * equivalent to the C function. * However, in most cases the Java method will raise an exception instead of returning an error code. - * See Errors and Exceptions below. + * @see @ref ERRORS. *

* Java does not support pass by reference of arguments, so arguments that are returned through OUT * parameters must be wrapped in an object or array. The Java API for HDF consistently wraps arguments in - * arrays. + * arrays. Where possible the Java function may return the OUT parameter as an object or basic type. *

- * For instance, a function that returns two integers is declared: + * For instance, a function that returns two integers declared as: * *

  *       h_err_t HDF5dummy( int *a1, int *a2)
@@ -137,26 +137,34 @@ import hdf.hdf5lib.structs.H5O_token_t;
  * 
  * public synchronized static native int HDF5dummy(int args[]);
  * 
+ * OR + *
+ * public synchronized static native int[] HDF5dummy();
+ * 
* * where a1 is args[0] and a2 is args[1], and would be invoked: * *
  * H5.HDF5dummy(a);
  * 
+ * OR + *
+ * a = H5.HDF5dummy();
+ * 
* *

* All the routines where this convention is used will have specific documentation of the details, given * below. *
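As a concrete illustration of the two conventions just described, a minimal sketch follows (an editorial aside, not part of this patch). Here space_id and attr_id are placeholders for identifiers obtained from earlier H5Dget_space() and H5Aopen() calls, and the usual hdf.hdf5lib.H5 import is assumed.

    // Array-wrapping style: the C "hsize_t *" OUT parameters become arrays the call fills in.
    long[] dims    = new long[2];
    long[] maxdims = new long[2];
    H5.H5Sget_simple_extent_dims(space_id, dims, maxdims);

    // Direct-return style: the wrapper hands the OUT value back as a Java object or primitive.
    String attrName = H5.H5Aget_name(attr_id);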

- * Arrays + * @ref HDFARRAY *

* HDF5 needs to read and write multi-dimensional arrays of any number type (and records). The HDF5 API * describes the layout of the source and destination, and the data for the array passed as a block of * bytes, for instance, * - *

- *      herr_t H5Dread(long fid, long filetype, long memtype, long memspace, void * data);
- * 
+ * @code + * herr_t H5Dread(long fid, long filetype, long memtype, long memspace, void *data); + * @endcode * *

* where ``void *'' means that the data may be any valid numeric type, and is a contiguous block of bytes that @@ -166,7 +174,7 @@ import hdf.hdf5lib.structs.H5O_token_t; * For Java, this ``ANY'' is a problem, as the type of data must always be declared. Furthermore, * multidimensional arrays are definitely not laid out contiguously in memory. It would be infeasible * to declare a separate routine for every combination of number type and dimensionality. For that reason, the - * HDFArray class is used to discover the type, shape, and + * @ref HDFARRAY HDFArray class is used to discover the type, shape, and * size of the data array at run time, and to convert to and from a contiguous array of bytes in synchronized * static native C order. *

@@ -174,88 +182,103 @@ import hdf.hdf5lib.structs.H5O_token_t; * passed as an ``Object'', and the Java API will translate to and from the appropriate packed array of bytes * needed by the C library. So the function above would be declared: * - *

- * public synchronized static native int H5Dread(long fid, long filetype, long memtype, long memspace,
- * Object data);
- * 
- * OPEN_IDS.addElement(id); - + * @code + * public synchronized static int H5Dread(long dataset_id, long mem_type_id, long mem_space_id, + * long file_space_id, long xfer_plist_id, Object obj, + * boolean isCriticalPinning) + * throws HDF5Exception, HDF5LibraryException, NullPointerException; + * @endcode + * * and the parameter data can be any multi-dimensional array of numbers, such as float[][], or * int[][][], or Double[][]. *
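For example, a whole two-dimensional dataset could be read into a Java array roughly as follows (a hedged sketch, not part of this patch: "example.h5" and "/dset2d" are placeholder names, the array shape must match the dataset being read, and the usual hdf.hdf5lib.H5 and HDF5Constants imports are assumed).

    long fid = H5.H5Fopen("example.h5", HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
    long did = H5.H5Dopen(fid, "/dset2d", HDF5Constants.H5P_DEFAULT);
    double[][] data = new double[4][6];   // HDFArray converts this to/from the contiguous byte block
    H5.H5Dread(did, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
               HDF5Constants.H5P_DEFAULT, data);
    H5.H5Dclose(did);
    H5.H5Fclose(fid);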

- * HDF-5 Constants + * @ref HDF5CONST *

- * The HDF-5 API defines a set of constants and enumerated values. Most of these values are available to Java - * programs via the class HDF5Constants. For example, + * The HDF5 API defines a set of constants and enumerated values. Most of these values are available to Java + * programs via the class @ref HDF5CONST HDF5Constants. For example, * the parameters for the h5open() call include two numeric values, HDFConstants.H5F_ACC_RDWR * and HDF5Constants.H5P_DEFAULT. * As would be expected, these numbers correspond to the C constants - * H5F_ACC_RDWR and H5P_DEFAULT. + * #H5F_ACC_RDWR and #H5P_DEFAULT. *

- * The HDF-5 API defines a set of values that describe number types and sizes, such as "H5T_NATIVE_INT" and - * "hsize_t". These values are determined at run time by the HDF-5 C library. To support these parameters, - * the Java class HDF5CDataTypes looks up the values + * The HDF5 API defines a set of values that describe number types and sizes, such as "H5T_NATIVE_INT" and + * "hsize_t". These values are determined at run time by the HDF5 C library. To support these parameters, + * the Java HDFConstants class looks up the values * when initiated. The values can be accessed as public variables of the Java class, such as: * - *

- * long data_type = HDF5CDataTypes.JH5T_NATIVE_INT;
- * 
+ * @code + * long data_type = HDFConstants.H5T_NATIVE_INT; + * @endcode * * The Java application uses both types of constants the same way, the only difference is that the - * HDF5CDataTypes may have different values on different platforms. + * HDFConstants may have different values on different platforms. *
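A small, self-contained sketch of the constants in use (illustrative only, not part of this patch; the file name is made up):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class ConstantsDemo {
        public static void main(String[] args) throws Exception {
            // H5F_ACC_TRUNC and H5P_DEFAULT are looked up from the C library at run time.
            long fid = H5.H5Fcreate("constants_demo.h5", HDF5Constants.H5F_ACC_TRUNC,
                                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            System.out.println("H5T_NATIVE_INT resolves to " + HDF5Constants.H5T_NATIVE_INT);
            H5.H5Fclose(fid);
        }
    }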

- * Error handling and Exceptions + * @ref ERRORS *

- * The HDF5 error API (H5E) manages the behavior of the error stack in the HDF-5 library. This API is omitted - * from the JHI5. Errors are converted into Java exceptions. This is totally different from the C interface, - * but is very natural for Java programming. - *

- * The exceptions of the JHI5 are organized as sub-classes of the class - * HDF5Exception. There are two subclasses - * of - * HDF5Exception, - HDF5LibraryException - * and HDF5JavaException. The - * sub-classes of the former represent errors from the HDF-5 C library, while sub-classes of the latter + * The HDF5 error API (@ref H5E) manages the behavior of the error stack in the HDF5 library. This API is + * omitted from the JHI5. Errors are converted into Java exceptions. This is totally different from the C + * interface, but is very natural for Java programming.

The exceptions of the JHI5 are organized as + * sub-classes of the class + * @ref ERRORS HDF5Exception. There are two subclasses of + * HDF5Exception, @ref ERRORSLIB HDF5LibraryException + * and @ref ERRORSJAVA HDF5JavaException. The + * sub-classes of the former represent errors from the HDF5 C library, while sub-classes of the latter * represent errors in the JHI5 wrapper and support code. *

* The super-class HDF5LibraryException implements the method 'printStackTrace()', - * which prints out the HDF-5 error stack, as described in the HDF-5 C API H5Eprint(). This may - * be used by Java exception handlers to print out the HDF-5 error stack. - *


+ * which prints out the HDF5 error stack, as described in the HDF5 C API @ref H5Eprint(). This + * may be used by Java exception handlers to print out the HDF5 error stack.
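A minimal sketch of the exception-based error handling described above (editorial example, not part of this patch; the file name is deliberately one that should not exist):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.hdf5lib.exceptions.HDF5LibraryException;

    public class ErrorDemo {
        public static void main(String[] args) {
            try {
                H5.H5Fopen("no_such_file.h5", HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
            }
            catch (HDF5LibraryException e) {
                e.printStackTrace();   // also prints the underlying HDF5 C error stack
            }
        }
    }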
+ * + * @version HDF5 1.10.10
+ * See also: + * @ref HDFARRAY hdf.hdf5lib.HDFArray
+ * @ref HDF5CONST hdf.hdf5lib.HDF5Constants
+ * @ref ERRORS hdf.hdf5lib.HDF5Exception
+ * HDF5 + * + * For details of the HDF5 library, @see @ref RM + */ + +/** + * This class is the Java interface for the HDF5 library. * - * @version HDF5 1.12.3
- * See also: hdf.hdf5lib.HDFArray
- * hdf.hdf5lib.HDF5Constants
- * hdf.hdf5lib.HDF5CDataTypes
- * hdf.hdf5lib.HDF5Exception
- * http://hdfgroup.org/HDF5" + * @defgroup JH5 HDF5 Library Java Interface + * + * This code is the called by Java programs to access the entry points of the HDF5 library. Each routine wraps + * a single HDF5 entry point, generally with the arguments and return codes analogous to the C interface. * */ public class H5 implements java.io.Serializable { /** - * + * Serialization ID */ private static final long serialVersionUID = 6129888282117053288L; private final static org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(H5.class); /** - * The version number of the HDF5 library: - * LIB_VERSION[0]: The major version of the library. - * LIB_VERSION[1]: The minor version of the library. - * LIB_VERSION[2]: The release number of the library. + * @ingroup JH5 * + * The version number of the HDF5 library: + *
    + *
+     * <ul>
+     * <li>LIB_VERSION[0]: The major version of the library.</li>
+     * <li>LIB_VERSION[1]: The minor version of the library.</li>
+     * <li>LIB_VERSION[2]: The release number of the library.</li>
+     * </ul>
* Make sure to update the versions number when a different library is used. */ public final static int LIB_VERSION[] = {1, 12, 3}; /** + * @ingroup JH5 + * * add system property to load library by path */ public final static String H5PATH_PROPERTY_KEY = "hdf.hdf5lib.H5.hdf5lib"; /** + * @ingroup JH5 + * * add system property to load library by name from library path, via System.loadLibrary() */ public final static String H5_LIBRARY_NAME_PROPERTY_KEY = "hdf.hdf5lib.H5.loadLibraryName"; @@ -268,6 +291,8 @@ public class H5 implements java.io.Serializable { static { loadH5Lib(); } /** + * @ingroup JH5 + * * load native library */ public static void loadH5Lib() @@ -372,6 +397,8 @@ public class H5 implements java.io.Serializable { // //////////////////////////////////////////////////////////// /** + * @ingroup JH5 + * * Get number of open IDs. * * @return Returns a count of open IDs @@ -379,6 +406,8 @@ public class H5 implements java.io.Serializable { public final static int getOpenIDCount() { return OPEN_IDS.size(); } /** + * @ingroup JH5 + * * Get the open IDs * * @return Returns a collection of open IDs @@ -386,6 +415,8 @@ public class H5 implements java.io.Serializable { public final static Collection getOpenIDs() { return OPEN_IDS; } /** + * @ingroup JH5 + * * H5check_version verifies that the arguments match the version numbers compiled into the library. * * @param majnum @@ -397,47 +428,55 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful. Upon failure (when the versions do not match), this * function causes the application to abort (i.e., crash) * - * See C API function: herr_t H5check_version() + * See C API function: @ref herr_t H5check_version(unsigned majnum, unsigned minnum, unsigned relnum) **/ public synchronized static native int H5check_version(int majnum, int minnum, int relnum); /** + * @ingroup JH5 + * * H5close flushes all data to disk, closes all file identifiers, and cleans up all memory used by the * library. * * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5close() throws HDF5LibraryException; /** + * @ingroup JH5 + * * H5open initialize the library. * * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5open() throws HDF5LibraryException; /** + * @ingroup JH5 + * * H5dont_atexit indicates to the library that an atexit() cleanup routine should not be installed. In * order to be effective, this routine must be called before any other HDF function calls, and must be * called each time the library is loaded/linked into the application (the first time and after it's been - * unloaded).

This is called by the static initializer, so this should never need to be explicitly + * unloaded).

This is called by the static initializer, so this should never need to be explicitly * called by a Java program. * * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ private synchronized static native int H5dont_atexit() throws HDF5LibraryException; /** - * Turn off error handling. By default, the C library prints the error stack of the HDF-5 C library on + * @ingroup JH5 + * + * Turn off error handling. By default, the C library prints the error stack of the HDF5 C library on * stdout. This behavior may be disabled by calling H5error_off(). * * @return a non-negative value if successful @@ -445,29 +484,35 @@ public class H5 implements java.io.Serializable { public synchronized static native int H5error_off(); /** - * Turn on error handling. By default, the C library prints the error stack of the HDF-5 C library on + * @ingroup JH5 + * + * Turn on error handling. By default, the C library prints the error stack of the HDF5 C library on * stdout. This behavior may be re-enabled by calling H5error_on(). */ public synchronized static native void H5error_on(); /** + * @ingroup JH5 + * * H5garbage_collect collects on all free-lists of all types. * * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5garbage_collect() throws HDF5LibraryException; /** + * @ingroup JH5 + * * H5get_libversion retrieves the major, minor, and release numbers of the version of the HDF library * which is linked to the application. * * @param libversion * The version information of the HDF library. * - *

+     * 
      *      libversion[0] = The major version of the library.
      *      libversion[1] = The minor version of the library.
      *      libversion[2] = The release number of the library.
@@ -475,11 +520,13 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful, along with the version information.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5get_libversion(int[] libversion) throws HDF5LibraryException;
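A minimal usage sketch (not part of this patch, and assuming the usual hdf.hdf5lib.H5 import) showing how the version numbers come back through the int[] argument:

    int[] libversion = new int[3];
    H5.H5get_libversion(libversion);
    System.out.println("Linked against HDF5 " + libversion[0] + "." + libversion[1] + "." + libversion[2]);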
 
     /**
+     * @ingroup JH5
+     *
      * H5set_free_list_limits
      *      Sets limits on the different kinds of free lists.  Setting a value
      *      of -1 for a limit means no limit of that type.  These limits are global
@@ -506,7 +553,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful, along with the version information.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      */
     public synchronized static native int H5set_free_list_limits(int reg_global_lim, int reg_list_lim,
                                                                  int arr_global_lim, int arr_list_lim,
@@ -514,6 +561,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5
+     *
      * H5export_dataset is a utility function to save data in a file.
      *
      * @param file_export_name
@@ -529,7 +578,7 @@ public class H5 implements java.io.Serializable {
      *            3 - export data as binary Big Endian.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native void H5export_dataset(String file_export_name, long file_id,
                                                             String object_path, int binary_order)
@@ -551,13 +600,15 @@ public class H5 implements java.io.Serializable {
      *            3 - export data as binary Big Endian.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native void H5export_attribute(String file_export_name, long dataset_id,
                                                               String attribute_name, int binary_order)
         throws HDF5LibraryException;
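For orientation, the two export helpers might be called as follows (an editorial sketch, not part of this patch; file_id and dataset_id are placeholders for open identifiers, "/dset" and "units" are made-up object names, and binary_order 3 is the binary Big Endian option documented above):

    H5.H5export_dataset("dset_dump.bin", file_id, "/dset", 3);
    H5.H5export_attribute("attr_dump.bin", dataset_id, "units", 3);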
 
     /**
+     * @ingroup JH5
+     *
      * H5is_library_threadsafe Checks to see if the library was built with thread-safety enabled.
      *
     * @return true if the HDF5 library was built with thread-safety enabled
@@ -572,11 +623,23 @@ public class H5 implements java.io.Serializable {
 
     // ////////////////////////////////////////////////////////////
     // //
-    // H5A: HDF5 1.8 Attribute Interface API Functions //
+    // H5A: HDF5 Attribute Interface API Functions //
     // //
     // ////////////////////////////////////////////////////////////
+    /**
+     * @defgroup JH5A Java Attribute (H5A) Interface
+     *
+     * An HDF5 attribute is a small metadata object describing the nature and/or intended usage of a primary
+     * data object. A primary data object may be a dataset, group, or committed datatype.
+     *
+     * @see H5A, C-API
+     *
+     * @see @ref H5A_UG, User Guide
+     **/
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aclose terminates access to the attribute specified by its identifier, attr_id.
      *
      * @param attr_id
@@ -585,7 +648,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static int H5Aclose(long attr_id) throws HDF5LibraryException
     {
@@ -601,6 +664,8 @@ public class H5 implements java.io.Serializable {
     private synchronized static native int _H5Aclose(long attr_id) throws HDF5LibraryException;
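The close call is the tail end of the usual create/write/close attribute pattern; a minimal, self-contained sketch (editorial example, not part of this patch; the file and attribute names are illustrative only):

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class AttributeDemo {
        public static void main(String[] args) throws Exception {
            long fid = H5.H5Fcreate("attr_demo.h5", HDF5Constants.H5F_ACC_TRUNC,
                                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            long sid = H5.H5Screate_simple(1, new long[] {1}, null);
            long aid = H5.H5Acreate(fid, "answer", HDF5Constants.H5T_NATIVE_INT, sid,
                                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            H5.H5Awrite(aid, HDF5Constants.H5T_NATIVE_INT, new int[] {42});
            H5.H5Aclose(aid);   // terminate access to the attribute
            H5.H5Sclose(sid);
            H5.H5Fclose(fid);
        }
    }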
 
     /**
+     * @ingroup JH5A
+     *
      * H5Acopy copies the content of one attribute to another.
      *
      * @param src_aid
@@ -611,11 +676,13 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      */
     public synchronized static native int H5Acopy(long src_aid, long dst_aid) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Acreate creates an attribute, attr_name, which is attached to the object specified by the identifier
      * loc_id.
      *
@@ -635,7 +702,7 @@ public class H5 implements java.io.Serializable {
      * @return An attribute identifier if successful; otherwise returns a negative value.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            Name is null.
      **/
@@ -652,6 +719,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Acreate2 an attribute, attr_name, which is attached to the object specified by the identifier loc_id.
      *
      * @see public static long H5Acreate( long loc_id, String attr_name, long type_id, long space_id, long
@@ -662,6 +731,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Acreate_by_name creates an attribute, attr_name, which is attached to the object specified by loc_id
      * and obj_name.
      *
@@ -685,7 +756,7 @@ public class H5 implements java.io.Serializable {
      * @return An attribute identifier if successful; otherwise returns a negative value.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -709,6 +780,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Adelete removes the attribute specified by its name, name, from a dataset, group, or named datatype.
      *
      * @param loc_id
@@ -719,7 +792,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -727,6 +800,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Adelete_by_idx removes an attribute, specified by its location in an index, from an object.
      *
      * @param loc_id
@@ -743,7 +818,7 @@ public class H5 implements java.io.Serializable {
      *            IN: Link access property list identifier
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            obj_name is null.
      **/
@@ -752,6 +827,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Adelete_by_name removes the attribute attr_name from an object specified by location and name, loc_id
      * and obj_name, respectively.
      *
@@ -767,7 +844,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful; otherwise returns a negative value.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -776,6 +853,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aexists determines whether the attribute attr_name exists on the object specified by obj_id.
      *
      * @param obj_id
@@ -786,7 +865,7 @@ public class H5 implements java.io.Serializable {
      * @return boolean true if an attribute with a given name exists.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            attr_name is null.
      **/
@@ -794,6 +873,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
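A typical guard built on H5Aexists might look like this (editorial sketch, not part of this patch; dset_id is a placeholder for an open dataset identifier and "units" is a made-up attribute name):

    if (H5.H5Aexists(dset_id, "units")) {
        long aid = H5.H5Aopen(dset_id, "units", HDF5Constants.H5P_DEFAULT);
        // ... read the attribute ...
        H5.H5Aclose(aid);
    }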
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aexists_by_name determines whether the attribute attr_name exists on an object. That object is
      * specified by its location and name, loc_id and obj_name, respectively.
      *
@@ -809,7 +890,7 @@ public class H5 implements java.io.Serializable {
      * @return boolean true if an attribute with a given name exists, otherwise returns false.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -818,6 +899,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aget_info retrieves attribute information, by attribute identifier.
      *
      * @param attr_id
@@ -826,11 +909,13 @@ public class H5 implements java.io.Serializable {
      * @return A buffer(H5A_info_t) for Attribute information
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native H5A_info_t H5Aget_info(long attr_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aget_info_by_idx Retrieves attribute information, by attribute index position.
      *
      * @param loc_id
@@ -849,7 +934,7 @@ public class H5 implements java.io.Serializable {
      * @return A buffer(H5A_info_t) for Attribute information
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            obj_name is null.
      **/
@@ -859,6 +944,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aget_info_by_name Retrieves attribute information, by attribute name.
      *
      * @param loc_id
@@ -873,7 +960,7 @@ public class H5 implements java.io.Serializable {
      * @return A buffer(H5A_info_t) for Attribute information
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            obj_name is null.
      **/
@@ -882,6 +969,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aget_name retrieves the name of an attribute specified by the identifier, attr_id.
      *
      * @param attr_id
@@ -890,11 +979,13 @@ public class H5 implements java.io.Serializable {
      * @return String for Attribute name.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native String H5Aget_name(long attr_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aget_name_by_idx retrieves the name of an attribute that is attached to an object, which is specified
      * by its location and name, loc_id and obj_name, respectively.
      *
@@ -923,6 +1014,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aget_space retrieves a copy of the dataspace for an attribute.
      *
      * @param attr_id
@@ -931,7 +1024,7 @@ public class H5 implements java.io.Serializable {
      * @return attribute dataspace identifier if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static long H5Aget_space(long attr_id) throws HDF5LibraryException
     {
@@ -947,6 +1040,8 @@ public class H5 implements java.io.Serializable {
     private synchronized static native long _H5Aget_space(long attr_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aget_storage_size returns the amount of storage that is required for the specified attribute,
      * attr_id.
      *
@@ -956,11 +1051,13 @@ public class H5 implements java.io.Serializable {
     * @return the amount of storage allocated for the attribute; otherwise returns 0 (zero)
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native long H5Aget_storage_size(long attr_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aget_type retrieves a copy of the datatype for an attribute.
      *
      * @param attr_id
@@ -969,7 +1066,7 @@ public class H5 implements java.io.Serializable {
      * @return a datatype identifier if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static long H5Aget_type(long attr_id) throws HDF5LibraryException
     {
@@ -985,6 +1082,8 @@ public class H5 implements java.io.Serializable {
     private synchronized static native long _H5Aget_type(long attr_id) throws HDF5LibraryException;
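The query functions above are typically used together; a short sketch (editorial aside, not part of this patch; attr_id is a placeholder for an identifier returned by H5Aopen()):

    String name  = H5.H5Aget_name(attr_id);
    long   space = H5.H5Aget_space(attr_id);        // a copy: close it with H5Sclose() when done
    long   type  = H5.H5Aget_type(attr_id);         // a copy: close it with H5Tclose() when done
    long   bytes = H5.H5Aget_storage_size(attr_id);
    H5.H5Tclose(type);
    H5.H5Sclose(space);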
 
     /**
+     * @ingroup JH5A
+     *
     * H5Aopen opens an existing attribute, attr_name, that is attached to an object specified by an object
      * identifier, object_id.
      *
@@ -998,7 +1097,7 @@ public class H5 implements java.io.Serializable {
      * @return An attribute identifier if successful; otherwise returns a negative value.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            Name is null.
      **/
@@ -1018,6 +1117,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aopen_by_idx opens an existing attribute that is attached to an object specified by location and
      * name, loc_id and obj_name, respectively
      *
@@ -1039,7 +1140,7 @@ public class H5 implements java.io.Serializable {
      * @return An attribute identifier if successful; otherwise returns a negative value.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            Name is null.
      **/
@@ -1061,6 +1162,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aopen_by_name Opens an attribute for an object by object name and attribute name
      *
      * @param loc_id
@@ -1077,7 +1180,7 @@ public class H5 implements java.io.Serializable {
      * @return Returns an attribute identifier if successful; otherwise returns a negative value.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            obj_name is null.
      **/
@@ -1098,6 +1201,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into buffer from the file.
      *
@@ -1113,7 +1218,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1122,6 +1227,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
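For a scalar integer attribute, a read might look like this (editorial sketch, not part of this patch; attr_id is a placeholder for an open attribute identifier):

    int[] value = new int[1];
    H5.H5Aread(attr_id, HDF5Constants.H5T_NATIVE_INT, value);
    System.out.println("attribute value = " + value[0]);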
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into buffer from the file.
      *
@@ -1135,7 +1242,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1146,6 +1253,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into buffer from the file.
      *
@@ -1159,7 +1268,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1170,6 +1279,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into data object from the file.
      *
@@ -1187,7 +1298,7 @@ public class H5 implements java.io.Serializable {
      * @exception HDF5Exception
      *            Failure in the data conversion.
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null. See public synchronized static native int H5Aread( )
      **/
@@ -1269,6 +1380,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into buffer of double from the file.
      *
@@ -1284,7 +1397,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1293,6 +1406,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into buffer of double from the file.
      *
@@ -1306,7 +1421,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1332,7 +1447,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1341,6 +1456,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into buffer of float from the file.
      *
@@ -1354,7 +1471,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1365,6 +1482,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into buffer of int from the file.
      *
@@ -1380,7 +1499,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1389,6 +1508,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into buffer of int from the file.
      *
@@ -1402,7 +1523,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1413,6 +1534,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into buffer of long from the file.
      *
@@ -1428,7 +1551,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1437,6 +1560,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into buffer of long from the file.
      *
@@ -1450,7 +1575,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1461,6 +1586,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into buffer of String from the file.
      *
@@ -1474,7 +1601,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1482,6 +1609,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into buffer of short from the file.
      *
@@ -1497,7 +1626,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1506,6 +1635,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
     * mem_type_id. The entire attribute is read into a buffer of short from the file.
      *
@@ -1519,7 +1650,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1530,6 +1661,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
     * mem_type_id. The entire attribute is read into a buffer of variable-length data from the file.
      *
@@ -1543,7 +1676,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1551,6 +1684,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into buffer of String from the file.
      *
@@ -1564,7 +1699,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1572,6 +1707,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
     * mem_type_id. The entire attribute is read into a buffer of variable-length strings from the file.
      *
@@ -1585,7 +1722,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *             data buffer is null.
      **/
@@ -1593,6 +1730,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aread reads an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is read into buffer of string from the file.
      *
@@ -1606,7 +1745,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -1614,8 +1753,10 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
     * H5Arename changes the name of an attribute that is attached to the object specified by loc_id. The
-     *attribute named old_attr_name is renamed new_attr_name.
+     * attribute named old_attr_name is renamed new_attr_name.
      *
      * @param loc_id
      *            IN: Location or object identifier; may be dataset or group
@@ -1627,7 +1768,7 @@ public class H5 implements java.io.Serializable {
      * @return A non-negative value if successful; otherwise returns a negative value.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            Name is null.
      **/
@@ -1635,6 +1776,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
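For example (editorial sketch, not part of this patch; loc_id is a placeholder for an open file, group, or dataset identifier, and the attribute names are made up):

    H5.H5Arename(loc_id, "old_name", "new_name");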
 
     /**
+     * @ingroup JH5A
+     *
     * H5Arename_by_name changes the name of an attribute that is attached to the object specified by loc_id and
      * obj_name. The attribute named old_attr_name is renamed new_attr_name.
      *
@@ -1652,7 +1795,7 @@ public class H5 implements java.io.Serializable {
      * @return A non-negative value if successful; otherwise returns a negative value.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            Name is null.
      **/
@@ -1661,6 +1804,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from buf to the file.
      *
@@ -1676,7 +1821,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -1685,6 +1830,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from buf to the file.
      *
@@ -1698,7 +1845,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -1709,6 +1856,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from buf to the file.
      *
@@ -1722,7 +1871,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -1733,6 +1882,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from data object to the file.
      *
@@ -1750,7 +1901,7 @@ public class H5 implements java.io.Serializable {
      * @exception HDF5Exception
      *            Failure in the data conversion.
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data object is null
      **/
@@ -1809,6 +1960,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from buffer of double to the file.
      *
@@ -1824,7 +1977,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *             Error from the HDF-5 Library.
+     *             Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -1833,6 +1986,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from buffer of double to the file.
      *
@@ -1846,7 +2001,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -1857,6 +2012,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from buffer of float to the file.
      *
@@ -1872,7 +2029,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -1881,6 +2038,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from buffer of float to the file.
      *
@@ -1894,7 +2053,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -1905,6 +2064,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from buffer of int to the file.
      *
@@ -1920,7 +2081,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -1929,6 +2090,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from buffer of int to the file.
      *
@@ -1942,7 +2105,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -1953,6 +2116,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from buffer of long to the file.
      *
@@ -1968,7 +2133,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -1977,6 +2142,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from buffer of long to the file.
      *
@@ -1990,7 +2157,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -2001,6 +2168,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from buffer of short to the file.
      *
@@ -2016,7 +2185,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -2025,6 +2194,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from buffer of short to the file.
      *
@@ -2038,7 +2209,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -2049,6 +2220,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from buffer of string to the file.
      *
@@ -2062,7 +2235,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -2070,6 +2243,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite writes an attribute, specified with attr_id. The attribute's memory datatype is specified with
      * mem_type_id. The entire attribute is written from a buffer of variable-length data to the file.
      *
@@ -2083,7 +2258,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data is null.
      **/
@@ -2091,6 +2266,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Awrite_VLStrings writes a variable length String attribute, specified by its identifier attr_id, from
      * the application memory buffer of variable-length strings into the file.
      *
@@ -2106,7 +2283,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
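
A rough sketch of the variable-length string case follows; it assumes attr_id is an existing attribute whose datatype str_type was created with H5Tcopy(H5T_C_S1) and H5Tset_size(str_type, H5T_VARIABLE), and exception handling is omitted:

    // Write two variable-length strings into an existing VL-string attribute.
    String[] vlData = {"first value", "a considerably longer second value"};
    H5.H5Awrite_VLStrings(attr_id, str_type, vlData);
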
@@ -2115,6 +2292,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aget_create_plist retrieves a copy of the attribute creation property list identifier.
      *
      * @param attr_id
@@ -2123,7 +2302,7 @@ public class H5 implements java.io.Serializable {
      * @return identifier for the attribute's creation property list if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static long H5Aget_create_plist(long attr_id) throws HDF5LibraryException
     {
@@ -2139,6 +2318,8 @@ public class H5 implements java.io.Serializable {
     private synchronized static native long _H5Aget_create_plist(long attr_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aiterate2 iterates over the attributes attached to a dataset, named datatype, or group, as
      * specified by obj_id. For each attribute, user-provided data, op_data, with additional information
      * as defined below, is passed to a user-defined function, op, which operates on that attribute.
@@ -2171,7 +2352,7 @@ public class H5 implements java.io.Serializable {
      *            members were processed with no operator returning non-zero.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            buf is null.
      **/
@@ -2180,6 +2361,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5A
+     *
      * H5Aiterate_by_name iterates over the attributes attached to the dataset or group specified with loc_id
      * and obj_name. For each attribute, user-provided data, op_data, with additional information as defined
      * below, is passed to a user-defined function, op, which operates on that attribute.
@@ -2216,7 +2399,7 @@ public class H5 implements java.io.Serializable {
      *            members were processed with no operator returning non-zero.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            buf is null.
      **/
@@ -2262,8 +2445,13 @@ public class H5 implements java.io.Serializable {
     // H5D: Datasets Interface Functions //
     // //
     // ////////////////////////////////////////////////////////////
+    /**
+     * @defgroup JH5D Java Datasets (H5D) Interface
+     **/
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dcopy copies the content of one dataset to another dataset.
      *
      * @param src_did
@@ -2274,11 +2462,13 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      */
     public synchronized static native int H5Dcopy(long src_did, long dst_did) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dclose ends access to a dataset specified by dataset_id and releases resources used by it.
      *
      * @param dataset_id
@@ -2287,7 +2477,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static int H5Dclose(long dataset_id) throws HDF5LibraryException
     {
@@ -2303,6 +2493,8 @@ public class H5 implements java.io.Serializable {
     private synchronized static native int _H5Dclose(long dataset_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dcreate creates a new dataset named name at the location specified by loc_id.
      *
      * @param loc_id
@@ -2323,7 +2515,7 @@ public class H5 implements java.io.Serializable {
      * @return a dataset identifier
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
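
As a quick illustration of the call sequence, a hedged sketch of creating a small two-dimensional integer dataset with default property lists (file_id is assumed to be an open file identifier; the path "/data" and extents are illustrative):

    // Create a 4x6 dataset of native ints at "/data" using default creation/access properties.
    long[] dims   = {4, 6};
    long space_id = H5.H5Screate_simple(2, dims, null);
    long dset_id  = H5.H5Dcreate(file_id, "/data", HDF5Constants.H5T_NATIVE_INT, space_id,
                                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
                                 HDF5Constants.H5P_DEFAULT);
    // ... write data with H5Dwrite ...
    H5.H5Dclose(dset_id);
    H5.H5Sclose(space_id);
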
@@ -2340,6 +2532,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dcreate2 creates a new dataset named name at the location specified by loc_id.
      *
      * @see public static int H5Dcreate(int loc_id, String name, int type_id, int space_id, int lcpl_id, int
@@ -2350,6 +2544,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dcreate_anon creates a dataset in the file specified by loc_id.
      *
      * @param loc_id
@@ -2366,7 +2562,7 @@ public class H5 implements java.io.Serializable {
      * @return a dataset identifier
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static long H5Dcreate_anon(long loc_id, long type_id, long space_id, long dcpl_id, long dapl_id)
         throws HDF5LibraryException
@@ -2385,6 +2581,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dfill explicitly fills the dataspace selection in memory, space_id, with the fill value specified in
      * fill.
      *
@@ -2400,7 +2598,7 @@ public class H5 implements java.io.Serializable {
      *            IN: Dataspace describing memory buffer and containing the selection to be filled.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            buf is null.
      **/
@@ -2409,6 +2607,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dget_access_plist returns an identifier for a copy of the dataset access property list for a dataset.
      *
      * @param dset_id
@@ -2417,11 +2617,13 @@ public class H5 implements java.io.Serializable {
      * @return a dataset access property list identifier
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native long H5Dget_access_plist(long dset_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dget_create_plist returns an identifier for a copy of the dataset creation property list for a
      * dataset.
      *
@@ -2430,7 +2632,7 @@ public class H5 implements java.io.Serializable {
      * @return a dataset creation property list identifier if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static long H5Dget_create_plist(long dataset_id) throws HDF5LibraryException
     {
@@ -2446,6 +2648,8 @@ public class H5 implements java.io.Serializable {
     private synchronized static native long _H5Dget_create_plist(long dataset_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dget_offset returns the address in the file of the dataset dset_id.
      *
      * @param dset_id
@@ -2454,11 +2658,13 @@ public class H5 implements java.io.Serializable {
      * @return the offset in bytes.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native long H5Dget_offset(long dset_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dget_space returns an identifier for a copy of the dataspace for a dataset.
      *
      * @param dataset_id
@@ -2467,7 +2673,7 @@ public class H5 implements java.io.Serializable {
      * @return a dataspace identifier if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static long H5Dget_space(long dataset_id) throws HDF5LibraryException
     {
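
A short sketch of using the returned dataspace to recover a dataset's current extent (dataset_id assumed open; the dataspace must be closed separately from the dataset):

    // Query the dataspace of a dataset and read back its rank and current dimensions.
    long space_id = H5.H5Dget_space(dataset_id);
    int rank      = H5.H5Sget_simple_extent_ndims(space_id);
    long[] dims   = new long[rank];
    H5.H5Sget_simple_extent_dims(space_id, dims, null);
    H5.H5Sclose(space_id);
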
@@ -2483,6 +2689,8 @@ public class H5 implements java.io.Serializable {
     private synchronized static native long _H5Dget_space(long dataset_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dget_space_status determines whether space has been allocated for the dataset dset_id.
      *
      * @param dset_id
@@ -2491,11 +2699,13 @@ public class H5 implements java.io.Serializable {
      * @return the space allocation status
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Dget_space_status(long dset_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dget_storage_size returns the amount of storage that is required for the dataset.
      *
      * @param dataset_id
@@ -2504,12 +2714,14 @@ public class H5 implements java.io.Serializable {
      * @return the amount of storage space allocated for the dataset.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native long H5Dget_storage_size(long dataset_id)
         throws HDF5LibraryException, IllegalArgumentException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dget_type returns an identifier for a copy of the datatype for a dataset.
      *
      * @param dataset_id
@@ -2518,7 +2730,7 @@ public class H5 implements java.io.Serializable {
      * @return a datatype identifier if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static long H5Dget_type(long dataset_id) throws HDF5LibraryException
     {
@@ -2534,6 +2746,8 @@ public class H5 implements java.io.Serializable {
     private synchronized static native long _H5Dget_type(long dataset_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Diterate iterates over all the data elements in the memory buffer buf, executing the callback
      * function operator once for each such data element.
      *
@@ -2552,7 +2766,7 @@ public class H5 implements java.io.Serializable {
      *            members were processed with no operator returning non-zero.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            buf is null.
      **/
@@ -2561,6 +2775,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dopen opens the existing dataset specified by a location identifier and name, loc_id and name,
      * respectively.
      *
@@ -2574,7 +2790,7 @@ public class H5 implements java.io.Serializable {
      * @return a dataset identifier if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -2591,6 +2807,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dopen2 opens the existing dataset specified by a location identifier and name, loc_id and name,
      * respectively.
      *
@@ -2600,6 +2818,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer buf.
      *
@@ -2621,7 +2841,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
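
To make the read path concrete, a sketch of reading an entire dataset into memory with the int[] overload documented below (file_id and the dataset name are placeholders, and the buffer is assumed to match the dataset's extent):

    // Open an existing dataset and read all of it into a flat int[] buffer.
    long dset_id = H5.H5Dopen(file_id, "/data", HDF5Constants.H5P_DEFAULT);
    int[] buf = new int[4 * 6];
    H5.H5Dread(dset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
               HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, buf);
    H5.H5Dclose(dset_id);
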
@@ -2631,6 +2851,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer buf.
      *
@@ -2650,7 +2872,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -2662,6 +2884,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer buf.
      *
@@ -2681,7 +2905,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -2693,6 +2917,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application data object.
      *
@@ -2716,7 +2942,7 @@ public class H5 implements java.io.Serializable {
      * @exception HDF5Exception
      *            Failure in the data conversion.
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data object is null.
      **/
@@ -2808,6 +3034,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer of type double.
      *
@@ -2829,7 +3057,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
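
The "(partial)" wording refers to reads driven by dataspace selections; a hedged sketch of reading one 2x3 block of a larger 2-D double dataset (dset_id assumed open, offsets and counts illustrative):

    // Select a 2x3 hyperslab starting at row 1, column 2, and read only that block.
    long file_space = H5.H5Dget_space(dset_id);
    long[] start  = {1, 2};
    long[] stride = {1, 1};
    long[] count  = {2, 3};
    long[] block  = {1, 1};
    H5.H5Sselect_hyperslab(file_space, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
    long mem_space = H5.H5Screate_simple(2, count, null);
    double[] data = new double[2 * 3];
    H5.H5Dread(dset_id, HDF5Constants.H5T_NATIVE_DOUBLE, mem_space, file_space,
               HDF5Constants.H5P_DEFAULT, data);
    H5.H5Sclose(mem_space);
    H5.H5Sclose(file_space);
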
@@ -2839,6 +3067,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer of type double.
      *
@@ -2858,7 +3088,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -2870,6 +3100,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer of float.
      *
@@ -2891,7 +3123,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -2901,6 +3133,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer of float.
      *
@@ -2920,7 +3154,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -2932,6 +3166,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer of int.
      *
@@ -2953,7 +3189,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -2963,6 +3199,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer of int.
      *
@@ -2982,7 +3220,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -2994,6 +3232,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer of long.
      *
@@ -3015,7 +3255,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -3025,6 +3265,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer of long.
      *
@@ -3044,7 +3286,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -3056,6 +3298,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer of string.
      *
@@ -3075,7 +3319,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -3085,6 +3329,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer of short.
      *
@@ -3106,7 +3352,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -3116,6 +3362,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer of short.
      *
@@ -3135,7 +3383,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -3147,6 +3395,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer of variable-length data.
      *
@@ -3166,7 +3416,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -3175,6 +3425,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer of string.
      *
@@ -3194,7 +3446,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -3203,6 +3455,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dread reads a (partial) dataset, specified by its identifier dataset_id, from the file into the
      * application memory buffer of variable-length strings.
      *
@@ -3222,7 +3476,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data buffer is null.
      **/
@@ -3232,6 +3486,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dset_extent sets the current dimensions of the chunked dataset dset_id to the sizes specified in
      * size.
      *
@@ -3241,7 +3497,7 @@ public class H5 implements java.io.Serializable {
      *            IN: Array containing the new magnitude of each dimension of the dataset.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            size is null.
      **/
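
Because only chunked datasets can be resized, the sketch below assumes dset_id was created with a chunked dataset creation property list (H5Pset_chunk) and maximum dimensions that are unlimited, or at least large enough, in the first dimension:

    // Grow a chunked 2-D dataset from 4 rows to 8 rows; existing data is preserved.
    long[] newSize = {8, 6};
    H5.H5Dset_extent(dset_id, newSize);
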
@@ -3249,6 +3505,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dvlen_get_buf_size determines the number of bytes required to store the VL data from the dataset,
      * using the space_id for the selection in the dataset on disk and the type_id for the memory
      * representation of the VL data in memory.
@@ -3263,7 +3521,7 @@ public class H5 implements java.io.Serializable {
      * @return the size in bytes of the memory buffer required to store the VL data.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            buf is null.
      **/
@@ -3271,6 +3529,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dvlen_reclaim reclaims buffer used for VL data.
      *
      * @param type_id
@@ -3285,7 +3545,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            buf is null.
      *
@@ -3297,6 +3557,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3318,7 +3580,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
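
A minimal sketch of the common whole-dataset write, here using the double[] variant documented in this section (dset_id is assumed open and shaped to match the buffer; exception handling omitted):

    // Write a full 4x6 dataset from a flat double[] buffer.
    double[] data = new double[4 * 6];
    for (int i = 0; i < data.length; i++)
        data[i] = i;
    H5.H5Dwrite(dset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
                HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, data);
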
@@ -3328,6 +3590,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3347,7 +3611,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3359,6 +3623,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3378,7 +3644,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3390,6 +3656,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory data object into the file.
      *
@@ -3413,7 +3681,7 @@ public class H5 implements java.io.Serializable {
      * @exception HDF5Exception
      *            Failure in the data conversion.
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            data object is null.
      **/
@@ -3485,6 +3753,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3506,7 +3776,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3516,6 +3786,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3535,7 +3807,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3548,6 +3820,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3569,7 +3843,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3579,6 +3853,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3598,7 +3874,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3610,6 +3886,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3631,7 +3909,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3641,6 +3919,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3660,7 +3940,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3672,6 +3952,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3693,7 +3975,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3703,6 +3985,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3722,7 +4006,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3734,6 +4018,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3755,7 +4041,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3765,6 +4051,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3784,7 +4072,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3796,6 +4084,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3815,7 +4105,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3825,6 +4115,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite writes a (partial) dataset, specified by its identifier dataset_id, from the application
      * memory buffer into the file.
      *
@@ -3844,7 +4136,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3853,6 +4145,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dwrite_VLStrings writes a (partial) variable length String dataset, specified by its identifier
      * dataset_id, from the application memory buffer buf into the file.
      *
@@ -3874,7 +4168,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -3885,6 +4179,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Dflush causes all buffers associated with a dataset to be immediately flushed to disk without
      * removing the data from the cache.
      *
@@ -3892,11 +4188,13 @@ public class H5 implements java.io.Serializable {
      *            IN: Identifier of the dataset to be flushed.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native void H5Dflush(long dset_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5D
+     *
      * H5Drefresh causes all buffers associated with a dataset to be cleared and immediately re-loaded with
      * updated contents from disk. This function essentially closes the dataset, evicts all metadata
      * associated with it from the cache, and then re-opens the dataset. The reopened dataset is automatically
@@ -3906,7 +4204,7 @@ public class H5 implements java.io.Serializable {
      *            IN: Identifier of the dataset to be refreshed.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native void H5Drefresh(long dset_id) throws HDF5LibraryException;
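
In a writer/reader pairing (for example a SWMR-style workflow), the two calls above are typically used together, roughly as sketched here with placeholder identifiers:

    // Writer side: push buffered data and metadata for the dataset out to the file.
    H5.H5Dflush(writer_dset_id);

    // Reader side: evict cached metadata and pick up the writer's latest extent and contents.
    H5.H5Drefresh(reader_dset_id);
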
 
@@ -3926,8 +4224,14 @@ public class H5 implements java.io.Serializable {
     // H5E: Error Stack //
     // //
     // ////////////////////////////////////////////////////////////
+    /**
+     *
+     * @defgroup JH5E Java Error (H5E) Interface
+     */
 
     /**
+     * @ingroup JH5E
+     *
      * H5Eauto_is_v2 determines whether the error auto reporting function for an error stack conforms to the
      * H5E_auto2_t typedef or the H5E_auto1_t typedef.
      *
@@ -3938,19 +4242,21 @@ public class H5 implements java.io.Serializable {
      *            H5E_auto1_t.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native boolean H5Eauto_is_v2(long stack_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5E
+     *
      * H5Eclear clears the error stack for the current thread. H5Eclear can fail if there are problems
      * initializing the library. 

This may be used by exception handlers to assure that the error condition - * in the HDF-5 library has been reset. + * in the HDF5 library has been reset. * * @return Returns a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static int H5Eclear() throws HDF5LibraryException { @@ -3959,6 +4265,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5E + * * H5Eclear clears the error stack specified by estack_id, or, if estack_id is set to H5E_DEFAULT, the * error stack for the current thread. * @@ -3966,11 +4274,13 @@ public class H5 implements java.io.Serializable { * IN: Error stack identifier. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static void H5Eclear(long stack_id) throws HDF5LibraryException { H5Eclear2(stack_id); } /** + * @ingroup JH5E + * * H5Eclear2 clears the error stack specified by estack_id, or, if estack_id is set to H5E_DEFAULT, the * error stack for the current thread. * @@ -3978,33 +4288,39 @@ public class H5 implements java.io.Serializable { * IN: Error stack identifier. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Eclear2(long stack_id) throws HDF5LibraryException; /** + * @ingroup JH5E + * * H5Eclose_msg closes an error message identifier, which can be either a major or minor message. * * @param err_id * IN: Error message identifier. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Eclose_msg(long err_id) throws HDF5LibraryException; /** + * @ingroup JH5E + * * H5Eclose_stack closes the object handle for an error stack and releases its resources. * * @param stack_id * IN: Error stack identifier. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Eclose_stack(long stack_id) throws HDF5LibraryException; /** + * @ingroup JH5E + * * H5Ecreate_msg adds an error message to an error class defined by client library or application program. * * @param cls_id @@ -4017,7 +4333,7 @@ public class H5 implements java.io.Serializable { * @return a message identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * msg is null. **/ @@ -4025,16 +4341,20 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5E + * * H5Ecreate_stack creates a new empty error stack and returns the new stack's identifier. * * @return an error stack identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Ecreate_stack() throws HDF5LibraryException; /** + * @ingroup JH5E + * * H5Eget_class_name retrieves the name of the error class specified by the class identifier. * * @param class_id @@ -4043,23 +4363,27 @@ public class H5 implements java.io.Serializable { * @return the name of the error class * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
**/ public synchronized static native String H5Eget_class_name(long class_id) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5E + * * H5Eget_current_stack copies the current error stack and returns an error stack identifier for the new * copy. * * @return an error stack identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Eget_current_stack() throws HDF5LibraryException; /** + * @ingroup JH5E + * * H5Eset_current_stack replaces the content of the current error stack with a copy of the content of the * error stack specified by estack_id. * @@ -4067,11 +4391,13 @@ public class H5 implements java.io.Serializable { * IN: Error stack identifier. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Eset_current_stack(long stack_id) throws HDF5LibraryException; /** + * @ingroup JH5E + * * H5Eget_msg retrieves the error message including its length and type. * * @param msg_id @@ -4082,12 +4408,14 @@ public class H5 implements java.io.Serializable { * @return the error message * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native String H5Eget_msg(long msg_id, int[] type_list) throws HDF5LibraryException; /** + * @ingroup JH5E + * * H5Eget_num retrieves the number of error records in the error stack specified by estack_id (including * major, minor messages and description). * @@ -4097,12 +4425,14 @@ public class H5 implements java.io.Serializable { * @return the number of error messages * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Eget_num(long stack_id) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5E + * * H5Eprint2 prints the error stack specified by estack_id on the specified stream, stream. * * @param stack_id @@ -4112,12 +4442,14 @@ public class H5 implements java.io.Serializable { * IN: File pointer, or stderr if null. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Eprint2(long stack_id, Object stream) throws HDF5LibraryException; /** + * @ingroup JH5E + * * H5Epop deletes the number of error records specified in count from the top of the error stack specified * by estack_id (including major, minor messages and description). * @@ -4127,11 +4459,13 @@ public class H5 implements java.io.Serializable { * IN: Version of the client library or application to which the error class belongs. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Epop(long stack_id, long count) throws HDF5LibraryException; /** + * @ingroup JH5E + * * H5Epush pushes a new error record onto the error stack specified by estack_id. * * @param stack_id @@ -4152,7 +4486,7 @@ public class H5 implements java.io.Serializable { * IN: Error description string. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * file, func, or msg is null. 
**/ @@ -4162,6 +4496,8 @@ public class H5 implements java.io.Serializable { H5Epush2(stack_id, file, func, line, cls_id, maj_id, min_id, msg); } /** + * @ingroup JH5E + * * H5Epush2 pushes a new error record onto the error stack specified by estack_id. * * @param stack_id @@ -4182,7 +4518,7 @@ public class H5 implements java.io.Serializable { * IN: Error description string. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * file, func, or msg is null. **/ @@ -4191,6 +4527,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5E + * * H5Eregister_class registers a client library or application program to the HDF5 error API so that the * client library or application program can report errors together with HDF5 library. * @@ -4204,7 +4542,7 @@ public class H5 implements java.io.Serializable { * @return a class identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -4212,17 +4550,21 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5E + * * H5Eunregister_class removes the error class specified by class_id. * * @param class_id * IN: Error class identifier. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Eunregister_class(long class_id) throws HDF5LibraryException; /** + * @ingroup JH5E + * * H5Ewalk walks the error stack specified by estack_id for the current thread and calls the * function specified in func for each error along the way. * @@ -4236,7 +4578,7 @@ public class H5 implements java.io.Serializable { * IN: Data to be passed with func. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * func is null. **/ @@ -4246,6 +4588,8 @@ public class H5 implements java.io.Serializable { H5Ewalk2(stack_id, direction, func, client_data); } /** + * @ingroup JH5E + * * H5Ewalk2 walks the error stack specified by estack_id for the current thread and calls the * function specified in func for each error along the way. * @@ -4259,7 +4603,7 @@ public class H5 implements java.io.Serializable { * IN: Data to be passed with func. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * func is null. **/ @@ -4298,8 +4642,14 @@ public class H5 implements java.io.Serializable { // H5F: File Interface Functions // // // // //////////////////////////////////////////////////////////// + /** + * + * @defgroup JH5F Java File (H5F) Interface + */ /** + * @ingroup JH5F + * * H5Fclose terminates access to an HDF5 file. * * @param file_id @@ -4308,7 +4658,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static int H5Fclose(long file_id) throws HDF5LibraryException { @@ -4324,6 +4674,8 @@ public class H5 implements java.io.Serializable { private synchronized static native int _H5Fclose(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fopen opens an existing file and is the primary function for accessing existing HDF5 files. 
* * @param name @@ -4336,7 +4688,7 @@ public class H5 implements java.io.Serializable { * @return a file identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -4356,6 +4708,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5F + * * H5Freopen reopens an HDF5 file. * * @param file_id @@ -4364,7 +4718,7 @@ public class H5 implements java.io.Serializable { * @return a new file identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static long H5Freopen(long file_id) throws HDF5LibraryException { @@ -4380,25 +4734,27 @@ public class H5 implements java.io.Serializable { private synchronized static native long _H5Freopen(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fcreate is the primary function for creating HDF5 files. * * @param name * Name of the file to access. * @param flags * File access flags. Possible values include: - *

-     *            <ul>
-     *            <li>H5F_ACC_RDWR Allow read and write access to file.</li>
-     *            <li>H5F_ACC_RDONLY Allow read-only access to file.</li>
-     *            <li>H5F_ACC_TRUNC Truncate file, if it already exists, erasing all data previously stored in the
-     *            file.</li>
-     *            <li>H5F_ACC_EXCL Fail if file already exists.</li>
-     *            <li>H5P_DEFAULT Apply default file access and creation properties.</li>
-     *            </ul>
+     *            <ul>
+     *            <li>@ref H5F_ACC_RDWR Allow read and write access to file.</li>
+     *            <li>@ref H5F_ACC_RDONLY Allow read-only access to file.</li>
+     *            <li>@ref H5F_ACC_TRUNC Truncate file, if it already exists, erasing all data previously stored
+     *            in the file.</li>
+     *            <li>@ref H5F_ACC_EXCL Fail if file already exists.</li>
+     *            <li>@ref H5P_DEFAULT Apply default file access and creation properties.</li>
+     *            </ul>
* * @param create_id * File creation property list identifier, used when modifying default file meta-data. Use @@ -4411,7 +4767,7 @@ public class H5 implements java.io.Serializable { * @return a file identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -4431,8 +4787,10 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5F + * * H5Fflush causes all buffers associated with a file or object to be immediately flushed (written) to - * disk without removing the data from the (memory) cache.

After this call completes, the file (or + * disk without removing the data from the (memory) cache.

After this call completes, the file (or * object) is in a consistent state and all data written to date is assured to be permanent. * * @param object_id @@ -4440,9 +4798,9 @@ public class H5 implements java.io.Serializable { * associated with the file, including the file itself, a dataset, a group, an attribute, * or a named data type. * @param scope - * specifies the scope of the flushing action, in the case that the HDF-5 file is not a single + * specifies the scope of the flushing action, in the case that the HDF5 file is not a single * physical file. - *

-     *            Valid values are:
+     *            Valid values are:
      *            <ul>
      *            <li>H5F_SCOPE_GLOBAL Flushes the entire virtual file.</li>
      *            <li>H5F_SCOPE_LOCAL Flushes only the specified file.</li>
      *            </ul>
  • @@ -4451,11 +4809,13 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Fflush(long object_id, int scope) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fget_access_plist returns the file access property list identifier of the specified file. * * @param file_id @@ -4464,7 +4824,7 @@ public class H5 implements java.io.Serializable { * @return a file access property list identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static long H5Fget_access_plist(long file_id) throws HDF5LibraryException { @@ -4480,6 +4840,8 @@ public class H5 implements java.io.Serializable { private synchronized static native long _H5Fget_access_plist(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fget_create_plist returns a file creation property list identifier identifying the creation * properties used to create this file. * @@ -4489,7 +4851,7 @@ public class H5 implements java.io.Serializable { * @return a file creation property list identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static long H5Fget_create_plist(long file_id) throws HDF5LibraryException { @@ -4505,6 +4867,8 @@ public class H5 implements java.io.Serializable { private synchronized static native long _H5Fget_create_plist(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fget_filesize retrieves the file size of the HDF5 file. This function * is called after an existing file is opened in order * to learn the true size of the underlying file. @@ -4515,11 +4879,13 @@ public class H5 implements java.io.Serializable { * @return the file size of the HDF5 file * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Fget_filesize(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fget_freespace returns the amount of space that is unused by any objects in the file. * * @param file_id @@ -4528,11 +4894,13 @@ public class H5 implements java.io.Serializable { * @return the amount of free space in the file * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Fget_freespace(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fget_intent retrieves the intended access mode flag passed with H5Fopen when the file was opened. * * @param file_id @@ -4541,11 +4909,13 @@ public class H5 implements java.io.Serializable { * @return the intended access mode flag, as originally passed with H5Fopen. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Fget_intent(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fget_fileno retrieves the "file number" for an open file. * * @param file_id @@ -4554,11 +4924,13 @@ public class H5 implements java.io.Serializable { * @return the unique file number for the file. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
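A short end-to-end sketch of the file calls documented in this hunk (H5Fcreate, H5Fflush, H5Fget_filesize, H5Fget_freespace, H5Fclose). Illustrative only, not part of the patch; the file name and the hdf.hdf5lib package layout are assumptions.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class FileCreateSketch {
        public static void main(String[] args) throws Exception {
            // Create the file, truncating any existing one, with default creation/access properties.
            long fid = H5.H5Fcreate("demo.h5", HDF5Constants.H5F_ACC_TRUNC,
                                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            try {
                // ... create groups/datasets here ...
                // Push everything buffered for this file out to disk without evicting it from the cache.
                H5.H5Fflush(fid, HDF5Constants.H5F_SCOPE_LOCAL);
                System.out.println("size on disk: " + H5.H5Fget_filesize(fid) + " bytes");
                System.out.println("unused space: " + H5.H5Fget_freespace(fid) + " bytes");
            }
            finally {
                H5.H5Fclose(fid);
            }
        }
    }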
**/ public synchronized static native long H5Fget_fileno(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fget_mdc_hit_rate queries the metadata cache of the target file to obtain its hit rate (cache hits / * (cache hits + cache misses)) since the last time hit rate statistics were reset. * @@ -4568,11 +4940,13 @@ public class H5 implements java.io.Serializable { * @return the double in which the hit rate is returned. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native double H5Fget_mdc_hit_rate(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fget_mdc_size queries the metadata cache of the target file for the desired size information. * * @param file_id @@ -4588,7 +4962,7 @@ public class H5 implements java.io.Serializable { * @return current number of entries in the cache * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * metadata_cache is null. **/ @@ -4596,6 +4970,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5F + * * H5Fget_name retrieves the name of the file to which the object obj_id belongs. * * @param obj_id @@ -4604,11 +4980,13 @@ public class H5 implements java.io.Serializable { * @return the filename. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native String H5Fget_name(long obj_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fget_obj_count returns the number of open object identifiers for the file. * * @param file_id @@ -4628,12 +5006,14 @@ public class H5 implements java.io.Serializable { * @return the number of open objects. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Fget_obj_count(long file_id, int types) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fget_obj_ids returns the list of identifiers for all open HDF5 objects fitting the specified * criteria. * @@ -4649,7 +5029,7 @@ public class H5 implements java.io.Serializable { * @return the number of objects placed into obj_id_list. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * obj_id_list is null. **/ @@ -4658,15 +5038,17 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5F + * * H5Fis_hdf5 determines whether a file is in the HDF5 format. * * @param name * File name to check format. * - * @return true if is HDF-5, false if not. + * @return true if is HDF5, false if not. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. * @@ -4677,6 +5059,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5F + * * H5Fis_accessible determines if the file can be opened with the given fapl. * * @param name @@ -4687,7 +5071,7 @@ public class H5 implements java.io.Serializable { * @return true if file is accessible, false if not. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
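The query functions above combine naturally when inspecting an existing file. Again a hedged sketch, not part of the patch; it assumes the file written by the previous sketch exists and uses only calls documented in this hunk.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class FileInspectSketch {
        public static void main(String[] args) throws Exception {
            String path = "demo.h5";                              // hypothetical file from the previous sketch
            if (!H5.H5Fis_hdf5(path)) {
                System.err.println(path + " is not an HDF5 file");
                return;
            }
            long fid = H5.H5Fopen(path, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
            try {
                System.out.println("name:   " + H5.H5Fget_name(fid));
                System.out.println("intent: " + H5.H5Fget_intent(fid));   // matches the flags passed to H5Fopen
                System.out.println("open objects: "
                                   + H5.H5Fget_obj_count(fid, HDF5Constants.H5F_OBJ_ALL));
            }
            finally {
                H5.H5Fclose(fid);
            }
        }
    }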
* @exception NullPointerException * name is null. **/ @@ -4695,6 +5079,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5F + * * H5Fmount mounts the file specified by child_id onto the group specified by loc_id and name using the * mount properties plist_id. * @@ -4710,7 +5096,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -4718,6 +5104,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5F + * * Given a mount point, H5Funmount disassociates the mount point's file from the file mounted there. * * @param loc_id @@ -4728,7 +5116,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -4736,6 +5124,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5F + * * H5Freset_mdc_hit_rate_stats resets the hit rate statistics counters in the metadata cache associated * with the specified file. * @@ -4743,12 +5133,14 @@ public class H5 implements java.io.Serializable { * IN: Identifier of the target file. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Freset_mdc_hit_rate_stats(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fget_info returns global information for the file associated with the * object identifier obj_id. * @@ -4757,11 +5149,13 @@ public class H5 implements java.io.Serializable { * @return A buffer(H5F_info2_t) for current "global" information about file * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native H5F_info2_t H5Fget_info(long obj_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fclear_elink_file_cache evicts all the cached child files in the specified file's external file * cache, causing them to be closed if there is nothing else holding them open. * @@ -4769,12 +5163,14 @@ public class H5 implements java.io.Serializable { * IN: Identifier of the target file. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Fclear_elink_file_cache(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fstart_swmr_write will activate SWMR writing mode for a file associated with file_id. This routine * will prepare and ensure the file is safe for SWMR writing. * @@ -4782,22 +5178,26 @@ public class H5 implements java.io.Serializable { * IN: Identifier of the target file. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Fstart_swmr_write(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fstart_mdc_logging starts logging metadata cache events if logging was previously enabled. * * @param file_id * IN: Identifier of the target file. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. 
+ * Error from the HDF5 Library. **/ public synchronized static native void H5Fstart_mdc_logging(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fstop_mdc_logging stops logging metadata cache events if logging was previously enabled and is * currently ongoing. * @@ -4805,11 +5205,13 @@ public class H5 implements java.io.Serializable { * IN: Identifier of the target file. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Fstop_mdc_logging(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fget_mdc_logging_status gets the current metadata cache logging status. * * @param file_id @@ -4821,7 +5223,7 @@ public class H5 implements java.io.Serializable { * mdc_logging_status[1] = is_currently_logging, whether events are currently being logged * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * mdc_logging_status is null. **/ @@ -4830,6 +5232,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5F + * * H5Fget_dset_no_attrs_hint gets the file-level setting to create minimized dataset object headers. * * @param file_id @@ -4838,12 +5242,14 @@ public class H5 implements java.io.Serializable { * @return true if the file-level is set to create minimized dataset object headers, false if not. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native boolean H5Fget_dset_no_attrs_hint(long file_id) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fset_dset_no_attrs_hint sets the file-level setting to create minimized dataset object headers. * * @param file_id @@ -4852,12 +5258,14 @@ public class H5 implements java.io.Serializable { * the minimize hint setting * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Fset_dset_no_attrs_hint(long file_id, boolean minimize) throws HDF5LibraryException; /** + * @ingroup JH5F + * * H5Fset_libver_bounds sets a different low and high bounds while a file is open. * * @param file_id @@ -4868,7 +5276,7 @@ public class H5 implements java.io.Serializable { * IN: The latest version of the library that will be used for writing objects. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Fset_libver_bounds(long file_id, int low, int high) throws HDF5LibraryException; @@ -4901,7 +5309,7 @@ public class H5 implements java.io.Serializable { // * @return a pointer to the file handle being used by the low-level // virtual file driver. // * - // * @exception HDF5LibraryException - Error from the HDF-5 Library. + // * @exception HDF5LibraryException - Error from the HDF5 Library. // **/ // public synchronized static native Pointer file_handle // H5Fget_vfd_handle(int file_id, int fapl) @@ -4918,7 +5326,7 @@ public class H5 implements java.io.Serializable { // * // * @return none // * - // * @exception HDF5LibraryException - Error from the HDF-5 Library. + // * @exception HDF5LibraryException - Error from the HDF5 Library. // * @exception NullPointerException - config_ptr is null. 
// **/ // public synchronized static native void H5Fget_mdc_config(int file_id, H5AC_cache_config_t config_ptr) @@ -4934,7 +5342,7 @@ public class H5 implements java.io.Serializable { // * // * @return none // * - // * @exception HDF5LibraryException - Error from the HDF-5 Library. + // * @exception HDF5LibraryException - Error from the HDF5 Library. // * @exception NullPointerException - config_ptr is null. // **/ // public synchronized static native int H5Fset_mdc_config(int file_id, H5AC_cache_config_t config_ptr) @@ -4979,8 +5387,13 @@ public class H5 implements java.io.Serializable { // H5G: Group Interface Functions // // // // //////////////////////////////////////////////////////////// + /** + * @defgroup JH5G Java Group (H5G) Interface + **/ /** + * @ingroup JH5G + * * H5Gclose releases resources used by a group which was opened by a call to H5Gcreate() or H5Gopen(). * * @param group_id @@ -4989,7 +5402,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static int H5Gclose(long group_id) throws HDF5LibraryException { @@ -5005,6 +5418,8 @@ public class H5 implements java.io.Serializable { private synchronized static native int _H5Gclose(long group_id) throws HDF5LibraryException; /** + * @ingroup JH5G + * * H5Gcreate creates a new group with the specified name at the specified location, loc_id. * * @param loc_id @@ -5022,7 +5437,7 @@ public class H5 implements java.io.Serializable { * @return a valid group identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -5043,6 +5458,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5G + * * H5Gcreate_anon creates a new empty group in the file specified by loc_id. * * @param loc_id @@ -5056,7 +5473,7 @@ public class H5 implements java.io.Serializable { * @return a valid group identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static long H5Gcreate_anon(long loc_id, long gcpl_id, long gapl_id) throws HDF5LibraryException { @@ -5073,6 +5490,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException; /** + * @ingroup JH5G + * * H5Gget_create_plist returns an identifier for the group creation property list associated with the * group specified by group_id. * @@ -5082,11 +5501,13 @@ public class H5 implements java.io.Serializable { * @return an identifier for the group's creation property list * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Gget_create_plist(long group_id) throws HDF5LibraryException; /** + * @ingroup JH5G + * * H5Gget_info retrieves information about the group specified by group_id. The information is returned in * the group_info struct. * @@ -5096,11 +5517,13 @@ public class H5 implements java.io.Serializable { * @return a structure in which group information is returned * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
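A minimal sketch of the group-creation calls documented above (H5Gcreate with its three default property lists, H5Gget_info, H5Gclose). Illustrative only; the hdf.hdf5lib.structs package path and the nlinks field name of H5G_info_t are assumptions, not shown in this hunk.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.hdf5lib.structs.H5G_info_t;   // assumed location of the struct class

    public class GroupCreateSketch {
        public static void main(String[] args) throws Exception {
            long fid = H5.H5Fcreate("groups.h5", HDF5Constants.H5F_ACC_TRUNC,
                                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            // Create /results with default link-creation, group-creation and group-access properties.
            long gid = H5.H5Gcreate(fid, "/results", HDF5Constants.H5P_DEFAULT,
                                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            H5G_info_t info = H5.H5Gget_info(gid);
            System.out.println("links in /results: " + info.nlinks);   // field name assumed
            H5.H5Gclose(gid);
            H5.H5Fclose(fid);
        }
    }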
**/ public synchronized static native H5G_info_t H5Gget_info(long group_id) throws HDF5LibraryException; /** + * @ingroup JH5G + * * H5Gget_info_by_idx retrieves information about a group, according to the group's position within an * index. * @@ -5120,7 +5543,7 @@ public class H5 implements java.io.Serializable { * @return a structure in which group information is returned * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -5130,6 +5553,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5G + * * H5Gget_info_by_name retrieves information about the group group_name located in the file or group * specified by loc_id. * @@ -5143,7 +5568,7 @@ public class H5 implements java.io.Serializable { * @return a structure in which group information is returned * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -5151,6 +5576,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5G + * * retrieves information of all objects under the group (name) located in the file or group specified by * loc_id. * @@ -5168,7 +5595,7 @@ public class H5 implements java.io.Serializable { * @return the number of items found * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. */ @@ -5185,6 +5612,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5G + * * retrieves information of all objects under the group (name) located in the file or group specified by * loc_id. * @@ -5206,7 +5635,7 @@ public class H5 implements java.io.Serializable { * @return the number of items found * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. */ @@ -5219,6 +5648,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5G + * * retrieves information of all objects under the group (name) located in the file or group specified by * loc_id. * @@ -5242,7 +5673,7 @@ public class H5 implements java.io.Serializable { * @return the number of items found * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. */ @@ -5255,6 +5686,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5G + * * retrieves information of all objects under the group (name) located in the file or group specified by * loc_id. * @@ -5280,7 +5713,7 @@ public class H5 implements java.io.Serializable { * @return the number of items found * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. */ @@ -5331,6 +5764,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5G + * * H5Gget_obj_info_idx report the name and type of object with index 'idx' in a Group. The 'idx' * corresponds to the index maintained by H5Giterate. Each link is returned, so objects with multiple * links will be counted once for each link. 
@@ -5349,7 +5784,7 @@ public class H5 implements java.io.Serializable { * @return non-negative if successful, -1 if not. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. */ @@ -5373,6 +5808,8 @@ public class H5 implements java.io.Serializable { * a lot of time to finish if the number of objects is more than 10,000 */ /** + * @ingroup JH5G + * * retrieves information of all objects (recurvisely) under the group (name) located in the file or group * specified by loc_id up to maximum specified by objMax. * @@ -5392,7 +5829,7 @@ public class H5 implements java.io.Serializable { * @return the number of items found * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. */ @@ -5433,6 +5870,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5G + * * H5Gn_members report the number of objects in a Group. The 'objects' include everything that will be * visited by H5Giterate. Each link is returned, so objects with multiple links will be counted once for * each link. @@ -5445,7 +5884,7 @@ public class H5 implements java.io.Serializable { * @return the number of members in the group or -1 if error. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. */ @@ -5467,6 +5906,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5G + * * H5Gopen opens an existing group, name, at the location specified by loc_id. * * @param loc_id @@ -5479,7 +5920,7 @@ public class H5 implements java.io.Serializable { * @return a valid group identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -5499,6 +5940,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5G + * * H5Gflush causes all buffers associated with a group to be immediately flushed to disk without * removing the data from the cache. * @@ -5506,11 +5949,13 @@ public class H5 implements java.io.Serializable { * IN: Identifier of the group to be flushed. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Gflush(long group_id) throws HDF5LibraryException; /** + * @ingroup JH5G + * * H5Grefresh causes all buffers associated with a group to be cleared and immediately re-loaded * with updated contents from disk. This function essentially closes the group, evicts all metadata * associated with it from the cache, and then re-opens the group. The reopened group is automatically @@ -5520,7 +5965,7 @@ public class H5 implements java.io.Serializable { * IN: Identifier of the group to be refreshed. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
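For the read side, H5Gopen and H5Gn_members from this hunk pair up as below; a hedged sketch only, assuming the file produced by the previous group example.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class GroupCountSketch {
        public static void main(String[] args) throws Exception {
            long fid = H5.H5Fopen("groups.h5", HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
            long gid = H5.H5Gopen(fid, "/results", HDF5Constants.H5P_DEFAULT);
            // H5Gn_members counts everything H5Giterate would visit under the named group.
            long nMembers = H5.H5Gn_members(fid, "/results");
            System.out.println("/results has " + nMembers + " members");
            H5.H5Gclose(gid);
            H5.H5Fclose(fid);
        }
    }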
**/ public synchronized static native void H5Grefresh(long group_id) throws HDF5LibraryException; @@ -5553,8 +5998,13 @@ public class H5 implements java.io.Serializable { // H5I: HDF5 Identifier Interface API Functions // // // // //////////////////////////////////////////////////////////// + /** + * @defgroup JH5I Java Identifier (H5I) Interface + **/ /** + * @ingroup JH5I + * * H5Iget_file_id obtains the file ID specified by the identifier, obj_id. * * @param obj_id @@ -5563,11 +6013,13 @@ public class H5 implements java.io.Serializable { * @return the file ID. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Iget_file_id(long obj_id) throws HDF5LibraryException; /** + * @ingroup JH5I + * * H5Iget_name_long retrieves the name of an object specified by the identifier, obj_id. * @deprecated * @@ -5581,12 +6033,14 @@ public class H5 implements java.io.Serializable { * @return the length of the name retrieved. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ @Deprecated public synchronized static native long H5Iget_name_long(long obj_id, String[] name, long size) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5I + * * H5Iget_name retrieves the name of an object specified by the identifier, obj_id. * * @param obj_id @@ -5595,11 +6049,13 @@ public class H5 implements java.io.Serializable { * @return String for Attribute name. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native String H5Iget_name(long obj_id) throws HDF5LibraryException; /** + * @ingroup JH5I + * * H5Iget_ref obtains the number of references outstanding specified by the identifier, obj_id. * * @param obj_id @@ -5608,12 +6064,14 @@ public class H5 implements java.io.Serializable { * @return the reference count. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Iget_ref(long obj_id) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5I + * * H5Idec_ref decrements the reference count specified by the identifier, obj_id. * If the reference count for an ID reaches zero, the object will be closed. * @@ -5623,12 +6081,14 @@ public class H5 implements java.io.Serializable { * @return the reference count. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Idec_ref(long obj_id) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5I + * * H5Iinc_ref increments the reference count specified by the identifier, obj_id. * * @param obj_id @@ -5637,12 +6097,14 @@ public class H5 implements java.io.Serializable { * @return the reference count. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Iinc_ref(long obj_id) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5I + * * H5Iget_type retrieves the type of the object identified by obj_id. * * @param obj_id @@ -5651,11 +6113,13 @@ public class H5 implements java.io.Serializable { * @return the object type if successful; otherwise H5I_BADID. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
**/ public synchronized static native int H5Iget_type(long obj_id) throws HDF5LibraryException; /** + * @ingroup JH5I + * * H5Iget_type_ref retrieves the reference count on an ID type. The reference count is used by the library * to indicate when an ID type can be destroyed. * @@ -5665,11 +6129,13 @@ public class H5 implements java.io.Serializable { * @return The current reference count on success, negative on failure. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Iget_type_ref(long type_id) throws HDF5LibraryException; /** + * @ingroup JH5I + * * H5Idec_type_ref decrements the reference count on an identifier type. The reference count is used by * the library to indicate when an identifier type can be destroyed. If the reference count reaches zero, * this function will destroy it. @@ -5680,11 +6146,13 @@ public class H5 implements java.io.Serializable { * @return The current reference count on success, negative on failure. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Idec_type_ref(long type_id) throws HDF5LibraryException; /** + * @ingroup JH5I + * * H5Iinc_type_ref increments the reference count on an ID type. The reference count is used by the * library to indicate when an ID type can be destroyed. * @@ -5694,11 +6162,13 @@ public class H5 implements java.io.Serializable { * @return The current reference count on success, negative on failure. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Iinc_type_ref(long type_id) throws HDF5LibraryException; /** + * @ingroup JH5I + * * H5Inmembers returns the number of identifiers of the identifier type specified in type. * * @param type_id @@ -5707,11 +6177,13 @@ public class H5 implements java.io.Serializable { * @return Number of identifiers of the specified identifier type * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Inmembers(long type_id) throws HDF5LibraryException; /** + * @ingroup JH5I + * * H5Iis_valid indicates if the identifier type specified in obj_id is valid. * * @param obj_id @@ -5720,11 +6192,13 @@ public class H5 implements java.io.Serializable { * @return a boolean, true if the specified identifier id is valid * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native boolean H5Iis_valid(long obj_id) throws HDF5LibraryException; /** + * @ingroup JH5I + * * H5Itype_exists indicates if the identifier type specified in type exists. * * @param type_id @@ -5733,11 +6207,13 @@ public class H5 implements java.io.Serializable { * @return a boolean, true if the specified identifier type exists * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native boolean H5Itype_exists(int type_id) throws HDF5LibraryException; /** + * @ingroup JH5I + * * H5Iclear_type deletes all identifiers of the type identified by the argument type. * * @param type_id @@ -5746,12 +6222,14 @@ public class H5 implements java.io.Serializable { * IN: Whether or not to force deletion of all identifiers * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
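The identifier queries documented above apply to any open HDF5 identifier; here is an illustrative sketch (not part of the patch) that inspects a group identifier and closes the extra file identifier returned by H5Iget_file_id.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class IdentifierSketch {
        public static void main(String[] args) throws Exception {
            long fid = H5.H5Fopen("groups.h5", HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
            long gid = H5.H5Gopen(fid, "/results", HDF5Constants.H5P_DEFAULT);

            if (H5.H5Iis_valid(gid)) {
                System.out.println("type: " + H5.H5Iget_type(gid));   // e.g. HDF5Constants.H5I_GROUP
                System.out.println("name: " + H5.H5Iget_name(gid));   // "/results"
                System.out.println("refs: " + H5.H5Iget_ref(gid));    // outstanding references
            }
            long fid2 = H5.H5Iget_file_id(gid);   // a new file identifier; close it separately
            H5.H5Fclose(fid2);
            H5.H5Gclose(gid);
            H5.H5Fclose(fid);
        }
    }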
**/ public synchronized static native void H5Iclear_type(int type_id, boolean force) throws HDF5LibraryException; /** + * @ingroup JH5I + * * H5Idestroy_type deletes an entire identifier type. All identifiers of this type are destroyed * and no new identifiers of this type can be registered. * @@ -5759,7 +6237,7 @@ public class H5 implements java.io.Serializable { * IN: Identifier of identifier type which is to be destroyed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Idestroy_type(int type_id) throws HDF5LibraryException; @@ -5785,8 +6263,13 @@ public class H5 implements java.io.Serializable { // ////////////////////////////////////////////////////////////////// // H5L: Link Interface Functions // // ////////////////////////////////////////////////////////////////// + /** + * @defgroup JH5L Java Link (H5L) Interface + **/ /** + * @ingroup JH5L + * * H5Lcopy copies a link from one location to another. * * @param src_loc @@ -5803,7 +6286,7 @@ public class H5 implements java.io.Serializable { * IN: Link access property list identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -5812,6 +6295,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Lcreate_external creates a new soft link to an external object, which is an object in a different * HDF5 file from the location of the link. * @@ -5829,7 +6314,7 @@ public class H5 implements java.io.Serializable { * IN: Link access property list identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -5839,6 +6324,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Lcreate_hard creates a new hard link to a pre-existing object in an HDF5 file. * * @param cur_loc @@ -5855,7 +6342,7 @@ public class H5 implements java.io.Serializable { * IN: Link access property list identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * cur_name or dst_name is null. **/ @@ -5864,6 +6351,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Lcreate_soft creates a new soft link to an object in an HDF5 file. * * @param link_target @@ -5878,7 +6367,7 @@ public class H5 implements java.io.Serializable { * IN: Link access property list identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * link_name is null. **/ @@ -5887,6 +6376,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Ldelete removes the link specified from a group. * * @param loc_id @@ -5897,7 +6388,7 @@ public class H5 implements java.io.Serializable { * IN: Link access property list identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. 
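The link-creation calls documented above (H5Lcreate_hard, H5Lcreate_soft) take the argument order shown in this hunk; the sketch below is illustrative only and uses default property lists throughout.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class LinkCreateSketch {
        public static void main(String[] args) throws Exception {
            long fid = H5.H5Fcreate("links.h5", HDF5Constants.H5F_ACC_TRUNC,
                                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            long gid = H5.H5Gcreate(fid, "/data", HDF5Constants.H5P_DEFAULT,
                                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            H5.H5Gclose(gid);

            // Hard link: /hard_alias refers to the same object as /data.
            H5.H5Lcreate_hard(fid, "/data", fid, "/hard_alias",
                              HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            // Soft link: /soft_alias stores the path "/data" and resolves it on access.
            H5.H5Lcreate_soft("/data", fid, "/soft_alias",
                              HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            H5.H5Fclose(fid);
        }
    }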
**/ @@ -5905,6 +6396,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Ldelete_by_idx removes the nth link in a group according to the specified order and in the specified * index. * @@ -5922,7 +6415,7 @@ public class H5 implements java.io.Serializable { * IN: Link access property list identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * group_name is null. **/ @@ -5931,6 +6424,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Lexists checks if a link with a particular name exists in a group. * * @param loc_id @@ -5943,7 +6438,7 @@ public class H5 implements java.io.Serializable { * @return a boolean, true if the name exists, otherwise false. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -5951,6 +6446,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Lget_info returns information about the specified link. * * @param loc_id @@ -5963,7 +6460,7 @@ public class H5 implements java.io.Serializable { * @return a buffer(H5L_info_t) for the link information. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -5971,6 +6468,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Lget_info_by_idx opens a named datatype at the location specified by loc_id and return an identifier * for the datatype. * @@ -5990,7 +6489,7 @@ public class H5 implements java.io.Serializable { * @return a buffer(H5L_info_t) for the link information. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * group_name is null. **/ @@ -6000,6 +6499,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Lget_name_by_idx retrieves name of the nth link in a group, according to the order within a specified * field or index. * @@ -6019,7 +6520,7 @@ public class H5 implements java.io.Serializable { * @return a String for the link name. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * group_name is null. **/ @@ -6028,6 +6529,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Lget_value returns the link value of a symbolic link. Note that this function is a combination * of H5Lget_info(), H5Lget_val() and for external links, H5Lunpack_elink_val. * @@ -6043,7 +6546,7 @@ public class H5 implements java.io.Serializable { * @return the link type * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -6052,6 +6555,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Lget_value_by_idx retrieves value of the nth link in a group, according to the order within an index. 
* Note that this function is a combination of H5Lget_info(), H5Lget_val() and for external links, * H5Lunpack_elink_val. @@ -6074,7 +6579,7 @@ public class H5 implements java.io.Serializable { * @return the link type * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * group_name is null. **/ @@ -6084,6 +6589,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Literate iterates through links in a group. * * @param grp_id @@ -6103,13 +6610,15 @@ public class H5 implements java.io.Serializable { * members were processed with no operator returning non-zero. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Literate(long grp_id, int idx_type, int order, long idx, H5L_iterate_t op, H5L_iterate_opdata_t op_data) throws HDF5LibraryException; /** + * @ingroup JH5L + * * H5Literate_by_name iterates through links in a group. * * @param grp_id @@ -6133,7 +6642,7 @@ public class H5 implements java.io.Serializable { * members were processed with no operator returning non-zero. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * group_name is null. **/ @@ -6143,6 +6652,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Lmove renames a link within an HDF5 file. * * @param src_loc @@ -6159,7 +6670,7 @@ public class H5 implements java.io.Serializable { * IN: Link access property list identifier to be associated with the new link. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -6168,6 +6679,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Lvisit recursively visits all links starting from a specified group. * * @param grp_id @@ -6185,12 +6698,14 @@ public class H5 implements java.io.Serializable { * members were processed with no operator returning non-zero. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Lvisit(long grp_id, int idx_type, int order, H5L_iterate_t op, H5L_iterate_opdata_t op_data) throws HDF5LibraryException; /** + * @ingroup JH5L + * * H5Lvisit_by_name recursively visits all links starting from a specified group. * * @param loc_id @@ -6212,7 +6727,7 @@ public class H5 implements java.io.Serializable { * members were processed with no operator returning non-zero. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * group_name is null. **/ @@ -6222,6 +6737,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5L + * * H5Lis_registered tests whether a user-defined link class is currently registered, * either by the HDF5 Library or by the user through the use of H5Lregister. * @@ -6233,11 +6750,13 @@ public class H5 implements java.io.Serializable { * user-defined class identifier. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
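A hedged sketch combining the link-query and link-maintenance calls from this hunk (H5Lexists, H5Lget_value, H5Lmove, H5Ldelete). The String[] out-parameter shape for H5Lget_value is inferred from the @param link_value description above and should be treated as an assumption.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class LinkInspectSketch {
        public static void main(String[] args) throws Exception {
            long fid = H5.H5Fopen("links.h5", HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);

            if (H5.H5Lexists(fid, "/soft_alias", HDF5Constants.H5P_DEFAULT)) {
                String[] target = new String[1];
                // For a soft link the stored target path is returned; the return value is the link type.
                int linkType = H5.H5Lget_value(fid, "/soft_alias", target, HDF5Constants.H5P_DEFAULT);
                System.out.println("soft link (type " + linkType + ") -> " + target[0]);
            }
            // Rename /hard_alias, then remove the renamed link again.
            H5.H5Lmove(fid, "/hard_alias", fid, "/renamed_alias",
                       HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            H5.H5Ldelete(fid, "/renamed_alias", HDF5Constants.H5P_DEFAULT);
            H5.H5Fclose(fid);
        }
    }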
**/ public synchronized static native int H5Lis_registered(int link_cls_id) throws HDF5LibraryException; /** + * @ingroup JH5L + * * H5Lunregister unregisters a class of user-defined links, preventing them from being traversed, queried, * moved, etc. * @@ -6245,7 +6764,7 @@ public class H5 implements java.io.Serializable { * IN: User-defined link class identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Lunregister(int link_cls_id) throws HDF5LibraryException; @@ -6279,8 +6798,13 @@ public class H5 implements java.io.Serializable { // H5O: HDF5 1.8 Object Interface API Functions // // // // //////////////////////////////////////////////////////////// + /** + * @defgroup JH5O Java Object (H5O) Interface + **/ /** + * @ingroup JH5O + * * H5Oclose closes the group, dataset, or named datatype specified. * * @param object_id @@ -6289,7 +6813,7 @@ public class H5 implements java.io.Serializable { * @return non-negative on success * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static int H5Oclose(long object_id) throws HDF5LibraryException { @@ -6305,6 +6829,8 @@ public class H5 implements java.io.Serializable { private synchronized static native int _H5Oclose(long object_id) throws HDF5LibraryException; /** + * @ingroup JH5O + * * H5Ocopy copies the group, dataset or named datatype specified from the file or group specified by * source location to the destination location. * @@ -6322,7 +6848,7 @@ public class H5 implements java.io.Serializable { * IN: Link creation property list for the new hard link * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -6331,6 +6857,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5O + * * H5Oget_comment retrieves the comment for the specified object. * * @param obj_id @@ -6339,12 +6867,14 @@ public class H5 implements java.io.Serializable { * @return the comment * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native String H5Oget_comment(long obj_id) throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5O + * * H5Oset_comment sets the comment for the specified object. * * @param obj_id @@ -6353,7 +6883,7 @@ public class H5 implements java.io.Serializable { * IN: The new comment. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * * @deprecated As of HDF5 1.8 in favor of object attributes. **/ @@ -6362,6 +6892,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException; /** + * @ingroup JH5O + * * H5Oget_comment_by_name retrieves the comment for an object. * * @param loc_id @@ -6374,7 +6906,7 @@ public class H5 implements java.io.Serializable { * @return the comment * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -6382,6 +6914,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException, NullPointerException; /** + * @ingroup JH5O + * * H5Oset_comment_by_name sets the comment for the specified object. 
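H5Ocopy and the comment accessors documented above can be exercised as below; an illustrative sketch only, assuming the file from the link example and default object-copy and link-creation property lists.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class ObjectCopySketch {
        public static void main(String[] args) throws Exception {
            long src = H5.H5Fopen("links.h5", HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
            long dst = H5.H5Fcreate("copy.h5", HDF5Constants.H5F_ACC_TRUNC,
                                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            // Copy /data (and everything reachable below it) into the destination file.
            H5.H5Ocopy(src, "/data", dst, "/data",
                       HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            // Read back any comment on the copy; H5Oset_comment is noted above as deprecated in favor of attributes.
            System.out.println("comment: "
                               + H5.H5Oget_comment_by_name(dst, "/data", HDF5Constants.H5P_DEFAULT));
            H5.H5Fclose(dst);
            H5.H5Fclose(src);
        }
    }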
* * @param loc_id @@ -6394,7 +6928,7 @@ public class H5 implements java.io.Serializable { * IN: Link access property list identifier. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. * @@ -6406,6 +6940,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5O + * * H5Oget_info retrieves the metadata for an object specified by an identifier. * * @param loc_id @@ -6414,7 +6950,7 @@ public class H5 implements java.io.Serializable { * @return object information * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -6424,6 +6960,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5O + * * H5Oget_info retrieves the metadata for an object specified by an identifier. * * @param loc_id @@ -6434,7 +6972,7 @@ public class H5 implements java.io.Serializable { * @return object information * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -6442,56 +6980,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** - * H5Oget_info_by_name retrieves the metadata for an object, identifying the object by location and - * relative name. - * - * @param loc_id - * IN: File or group identifier specifying location of group in which object is located - * @param name - * IN: Relative name of group - * @param lapl_id - * IN: Access property list identifier for the link pointing to the object (Not currently used; - * pass as H5P_DEFAULT.) - * - * @return object information - * - * @exception HDF5LibraryException - * Error from the HDF-5 Library. - * @exception NullPointerException - * name is null. - **/ - public static H5O_info_t H5Oget_info_by_name(long loc_id, String name, long lapl_id) - throws HDF5LibraryException, NullPointerException - { - return H5Oget_info_by_name(loc_id, name, HDF5Constants.H5O_INFO_ALL, lapl_id); - } - - /** - * H5Oget_info_by_name retrieves the metadata for an object, identifying the object by location and - * relative name. - * - * @param loc_id - * IN: File or group identifier specifying location of group in which object is located - * @param name - * IN: Relative name of group - * @param fields - * IN: Object fields to select - * @param lapl_id - * IN: Access property list identifier for the link pointing to the object (Not currently used; - * pass as H5P_DEFAULT.) - * - * @return object information + * @ingroup JH5O * - * @exception HDF5LibraryException - * Error from the HDF-5 Library. - * @exception NullPointerException - * name is null. - **/ - public synchronized static native H5O_info_t H5Oget_info_by_name(long loc_id, String name, int fields, - long lapl_id) - throws HDF5LibraryException, NullPointerException; - - /** * H5Oget_info_by_idx retrieves the metadata for an object, identifying the object by an index position. * * @param loc_id @@ -6511,7 +7001,7 @@ public class H5 implements java.io.Serializable { * @return object information * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. 
**/ @@ -6524,6 +7014,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5O + * * H5Oget_info_by_idx retrieves the metadata for an object, identifying the object by an index position. * * @param loc_id @@ -6545,7 +7037,7 @@ public class H5 implements java.io.Serializable { * @return object information * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -6555,50 +7047,10 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** - * H5Oget_native_info retrieves the native HDF5-specific metadata for an HDF5 object specified by an - * identifier. Native HDF5-specific metadata includes things like object header information and object - * storage layout information. - * - * @param loc_id - * IN: Identifier for target object - * - * @return object information - * - * @exception HDF5LibraryException - * Error from the HDF-5 Library. - * @exception NullPointerException - * name is null. - **/ - public static H5O_native_info_t H5Oget_native_info(long loc_id) - throws HDF5LibraryException, NullPointerException - { - return H5Oget_native_info(loc_id, HDF5Constants.H5O_NATIVE_INFO_ALL); - } - - /** - * H5Oget_native_info retrieves the native HDF5-specific metadata for an HDF5 object specified by an - * identifier. Native HDF5-specific metadata includes things like object header information and object - * storage layout information. - * - * @param loc_id - * IN: Identifier for target object - * @param fields - * IN: Object fields to select + * @ingroup JH5O * - * @return object information - * - * @exception HDF5LibraryException - * Error from the HDF-5 Library. - * @exception NullPointerException - * name is null. - **/ - public synchronized static native H5O_native_info_t H5Oget_native_info(long loc_id, int fields) - throws HDF5LibraryException, NullPointerException; - - /** - * H5Oget_native_info_by_name retrieves the native HDF5-specific metadata for an HDF5 object, identifying - * the object by location and relative name. Native HDF5-specific metadata includes things like object - * header information and object storage layout information. + * H5Oget_info_by_name retrieves the metadata for an object, identifying the object by location and + * relative name. * * @param loc_id * IN: File or group identifier specifying location of group in which object is located @@ -6611,20 +7063,21 @@ public class H5 implements java.io.Serializable { * @return object information * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ - public static H5O_native_info_t H5Oget_native_info_by_name(long loc_id, String name, long lapl_id) + public static H5O_info_t H5Oget_info_by_name(long loc_id, String name, long lapl_id) throws HDF5LibraryException, NullPointerException { - return H5Oget_native_info_by_name(loc_id, name, HDF5Constants.H5O_NATIVE_INFO_ALL, lapl_id); + return H5Oget_info_by_name(loc_id, name, HDF5Constants.H5O_INFO_ALL, lapl_id); } /** - * H5Oget_native_info_by_name retrieves the native HDF5-specific metadata for an HDF5 object, identifying - * the object by location and relative name. Native HDF5-specific metadata includes things like object - * header information and object storage layout information. 
+ * @ingroup JH5O + * + * H5Oget_info_by_name retrieves the metadata for an object, identifying the object by location and + * relative name. * * @param loc_id * IN: File or group identifier specifying location of group in which object is located @@ -6639,81 +7092,17 @@ public class H5 implements java.io.Serializable { * @return object information * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ - public synchronized static native H5O_native_info_t H5Oget_native_info_by_name(long loc_id, String name, - int fields, long lapl_id) + public synchronized static native H5O_info_t H5Oget_info_by_name(long loc_id, String name, int fields, + long lapl_id) throws HDF5LibraryException, NullPointerException; /** - * H5Oget_native_info_by_idx retrieves the native HDF5-specific metadata for an HDF5 object, identifying - * the object by an index position. Native HDF5-specific metadata includes things like object header - * information and object storage layout information. - * - * @param loc_id - * IN: File or group identifier - * @param group_name - * IN: Name of group, relative to loc_id, in which object is located - * @param idx_type - * IN: Type of index by which objects are ordered - * @param order - * IN: Order of iteration within index - * @param n - * IN: Object to open - * @param lapl_id - * IN: Access property list identifier for the link pointing to the object (Not currently used; - * pass as H5P_DEFAULT.) - * - * @return object information - * - * @exception HDF5LibraryException - * Error from the HDF-5 Library. - * @exception NullPointerException - * name is null. - **/ - public static H5O_native_info_t H5Oget_native_info_by_idx(long loc_id, String group_name, int idx_type, - int order, long n, long lapl_id) - throws HDF5LibraryException, NullPointerException - { - return H5Oget_native_info_by_idx(loc_id, group_name, idx_type, order, n, - HDF5Constants.H5O_NATIVE_INFO_ALL, lapl_id); - } - - /** - * H5Oget_native_info_by_idx retrieves the native HDF5-specific metadata for an HDF5 object, identifying - * the object by an index position. Native HDF5-specific metadata includes things like object header - * information and object storage layout information. - * - * @param loc_id - * IN: File or group identifier - * @param group_name - * IN: Name of group, relative to loc_id, in which object is located - * @param idx_type - * IN: Type of index by which objects are ordered - * @param order - * IN: Order of iteration within index - * @param n - * IN: Object to open - * @param fields - * IN: Object fields to select - * @param lapl_id - * IN: Access property list identifier for the link pointing to the object (Not currently used; - * pass as H5P_DEFAULT.) - * - * @return object information + * @ingroup JH5O * - * @exception HDF5LibraryException - * Error from the HDF-5 Library. - * @exception NullPointerException - * name is null. - **/ - public synchronized static native H5O_native_info_t H5Oget_native_info_by_idx( - long loc_id, String group_name, int idx_type, int order, long n, int fields, long lapl_id) - throws HDF5LibraryException, NullPointerException; - - /** * H5Olink creates a new hard link to an object in an HDF5 file. * * @param obj_id @@ -6728,7 +7117,7 @@ public class H5 implements java.io.Serializable { * IN: Access property list identifier. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
* @exception NullPointerException * name is null. **/ @@ -6737,6 +7126,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5O + * * H5Oopen opens a group, dataset, or named datatype specified by a location and a path name. * * @param loc_id @@ -6749,7 +7140,7 @@ public class H5 implements java.io.Serializable { * @return an object identifier for the opened object * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -6769,6 +7160,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5O + * * H5Ovisit recursively visits all objects accessible from a specified object. * * @param obj_id @@ -6787,7 +7180,7 @@ public class H5 implements java.io.Serializable { * members were processed with no operator returning non-zero. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -6798,6 +7191,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5O + * * H5Ovisit recursively visits all objects accessible from a specified object. * * @param obj_id @@ -6818,7 +7213,7 @@ public class H5 implements java.io.Serializable { * members were processed with no operator returning non-zero. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -6827,6 +7222,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5O + * * H5Ovisit_by_name recursively visits all objects starting from a specified object. * * @param loc_id @@ -6849,7 +7246,7 @@ public class H5 implements java.io.Serializable { * members were processed with no operator returning non-zero. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -6862,6 +7259,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5O + * * H5Ovisit_by_name recursively visits all objects starting from a specified object. * * @param loc_id @@ -6886,7 +7285,7 @@ public class H5 implements java.io.Serializable { * members were processed with no operator returning non-zero. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -6896,6 +7295,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5O + * * H5Oexists_by_name is used by an application to check that an existing link resolves to an object. * Primarily, it is designed to check for dangling soft, external, or user-defined links. * @@ -6909,7 +7310,7 @@ public class H5 implements java.io.Serializable { * @return Returns TRUE or FALSE if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -6917,24 +7318,28 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5O + * * H5Odecr_refcount decrements the hard link reference count for an object. 
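// Illustrative sketch: checking that a possibly dangling link resolves before
// opening it, per H5Oexists_by_name and H5Oopen above; the link path is
// hypothetical and fid is an already open file identifier.
static void openIfResolvable(long fid) throws Exception {
    if (H5.H5Oexists_by_name(fid, "/g1/soft_link", HDF5Constants.H5P_DEFAULT)) {
        long oid = H5.H5Oopen(fid, "/g1/soft_link", HDF5Constants.H5P_DEFAULT);
        // ... work with the opened object ...
        H5.H5Oclose(oid);
    }
}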
* * @param object_id * IN: Object identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Odecr_refcount(long object_id) throws HDF5LibraryException; /** + * @ingroup JH5O + * * H5Oincr_refcount increments the hard link reference count for an object. * * @param object_id * IN: Object identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Oincr_refcount(long object_id) throws HDF5LibraryException; @@ -6949,7 +7354,7 @@ public class H5 implements java.io.Serializable { * @return an object identifier for the opened object * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static long H5Oopen_by_token(long loc_id, H5O_token_t token) throws HDF5LibraryException { @@ -6968,6 +7373,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5O + * * H5Oopen_by_idx opens the nth object in the group specified. * * @param loc_id @@ -6986,7 +7393,7 @@ public class H5 implements java.io.Serializable { * @return an object identifier for the opened object * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * group_name is null. **/ @@ -7007,6 +7414,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5O + * * H5Oflush causes all buffers associated with an object to be immediately flushed to disk without * removing the data from the cache. object_id can be any named object associated with an HDF5 file * including a dataset, a group, or a committed datatype. @@ -7015,11 +7424,13 @@ public class H5 implements java.io.Serializable { * IN: Identifier of the object to be flushed. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Oflush(long object_id) throws HDF5LibraryException; /** + * @ingroup JH5O + * * H5Orefresh causes all buffers associated with an object to be cleared and immediately re-loaded with * updated contents from disk. This function essentially closes the object, evicts all metadata associated * with it from the cache, and then re-opens the object. The reopened object is automatically @@ -7030,11 +7441,13 @@ public class H5 implements java.io.Serializable { * IN: Identifier of the object to be refreshed. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Orefresh(long object_id) throws HDF5LibraryException; /** + * @ingroup JH5O + * * H5Odisable_mdc_flushes corks an object, keeping dirty entries associated with the object in the * metadata cache. * @@ -7042,7 +7455,10 @@ public class H5 implements java.io.Serializable { * IN: Identifier of the object to be corked. **/ public synchronized static native void H5Odisable_mdc_flushes(long object_id); + /** + * @ingroup JH5O + * * H5Oenable_mdc_flushes uncorks an object, keeping dirty entries associated with the object in the * metadata cache. * @@ -7050,7 +7466,10 @@ public class H5 implements java.io.Serializable { * IN: Identifier of the object to be uncorked. 
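// Illustrative sketch: flushing an object's buffers to disk and then refreshing
// it from disk with H5Oflush/H5Orefresh above, as a writer and reader might do
// in a SWMR-style workflow; dset_id is a hypothetical open dataset identifier.
static void flushAndRefresh(long dset_id) throws Exception {
    H5.H5Oflush(dset_id);   // write out buffered data/metadata, keep cache entries
    H5.H5Orefresh(dset_id); // evict cached metadata and re-read the object from disk
}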
**/ public synchronized static native void H5Oenable_mdc_flushes(long object_id); + /** + * @ingroup JH5O + * * H5Oare_mdc_flushes_disabled retrieve the object's "cork" status. * * @param object_id @@ -7062,46 +7481,221 @@ public class H5 implements java.io.Serializable { **/ public synchronized static native boolean H5Oare_mdc_flushes_disabled(long object_id); - // /////// unimplemented //////// - // herr_t H5Otoken_cmp(hid_t loc_id, const H5O_token_t *token1, const H5O_token_t *token2, - // int *cmp_value); - // herr_t H5Otoken_to_str(hid_t loc_id, const H5O_token_t *token, char **token_str); - // herr_t H5Otoken_from_str(hid_t loc_id, const char *token_str, H5O_token_t *token); - - // //////////////////////////////////////////////////////////// - // // - // H5P: Property List Interface Functions // - // // - // //////////////////////////////////////////////////////////// - - // /////// Generic property list routines /////// - /** - * H5Pget_class_name retrieves the name of a generic property list class - * - * @param plid - * IN: Identifier of property object to query - * @return name of a property list if successful; null if failed + * @ingroup JH5O * - * @exception HDF5LibraryException - * Error from the HDF-5 Library. - */ - public synchronized static native String H5Pget_class_name(long plid) throws HDF5LibraryException; - - /** - * H5Pcreate creates a new property as an instance of some property list class. + * H5Oget_native_info retrieves the native HDF5-specific metadata for an HDF5 object specified by an + * identifier. Native HDF5-specific metadata includes things like object header information and object + * storage layout information. * - * @param type - * IN: The type of property list to create. + * @param loc_id + * IN: Identifier for target object * - * @return a property list identifier (plist) if successful; otherwise Fail (-1). + * @return object information * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. + * @exception NullPointerException + * name is null. **/ - public static long H5Pcreate(long type) throws HDF5LibraryException + public static H5O_native_info_t H5Oget_native_info(long loc_id) + throws HDF5LibraryException, NullPointerException { - long id = _H5Pcreate(type); + return H5Oget_native_info(loc_id, HDF5Constants.H5O_NATIVE_INFO_ALL); + } + + /** + * @ingroup JH5O + * + * H5Oget_native_info retrieves the native HDF5-specific metadata for an HDF5 object specified by an + * identifier. Native HDF5-specific metadata includes things like object header information and object + * storage layout information. + * + * @param loc_id + * IN: Identifier for target object + * @param fields + * IN: Object fields to select + * + * @return object information + * + * @exception HDF5LibraryException + * Error from the HDF5 Library. + * @exception NullPointerException + * name is null. + **/ + public synchronized static native H5O_native_info_t H5Oget_native_info(long loc_id, int fields) + throws HDF5LibraryException, NullPointerException; + + /** + * @ingroup JH5O + * + * H5Oget_native_info_by_name retrieves the native HDF5-specific metadata for an HDF5 object, identifying + * the object by location and relative name. Native HDF5-specific metadata includes things like object + * header information and object storage layout information. 
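// Illustrative sketch: "corking" an object so dirty metadata cache entries are
// retained during a burst of updates, then uncorking it, using the three
// H5O*_mdc_flushes routines above; obj_id is a hypothetical open object identifier.
static void withCorkedMetadata(long obj_id) throws Exception {
    H5.H5Odisable_mdc_flushes(obj_id);                                        // cork
    System.out.println("corked: " + H5.H5Oare_mdc_flushes_disabled(obj_id));
    // ... perform metadata updates ...
    H5.H5Oenable_mdc_flushes(obj_id);                                         // uncork
}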
+ * + * @param loc_id + * IN: File or group identifier specifying location of group in which object is located + * @param name + * IN: Relative name of group + * @param lapl_id + * IN: Access property list identifier for the link pointing to the object (Not currently used; + * pass as H5P_DEFAULT.) + * + * @return object information + * + * @exception HDF5LibraryException + * Error from the HDF5 Library. + * @exception NullPointerException + * name is null. + **/ + public static H5O_native_info_t H5Oget_native_info_by_name(long loc_id, String name, long lapl_id) + throws HDF5LibraryException, NullPointerException + { + return H5Oget_native_info_by_name(loc_id, name, HDF5Constants.H5O_NATIVE_INFO_ALL, lapl_id); + } + + /** + * @ingroup JH5O + * + * H5Oget_native_info_by_name retrieves the native HDF5-specific metadata for an HDF5 object, identifying + * the object by location and relative name. Native HDF5-specific metadata includes things like object + * header information and object storage layout information. + * + * @param loc_id + * IN: File or group identifier specifying location of group in which object is located + * @param name + * IN: Relative name of group + * @param fields + * IN: Object fields to select + * @param lapl_id + * IN: Access property list identifier for the link pointing to the object (Not currently used; + * pass as H5P_DEFAULT.) + * + * @return object information + * + * @exception HDF5LibraryException + * Error from the HDF5 Library. + * @exception NullPointerException + * name is null. + **/ + public synchronized static native H5O_native_info_t H5Oget_native_info_by_name(long loc_id, String name, + int fields, long lapl_id) + throws HDF5LibraryException, NullPointerException; + + /** + * @ingroup JH5O + * + * H5Oget_native_info_by_idx retrieves the native HDF5-specific metadata for an HDF5 object, identifying + * the object by an index position. Native HDF5-specific metadata includes things like object header + * information and object storage layout information. + * + * @param loc_id + * IN: File or group identifier + * @param group_name + * IN: Name of group, relative to loc_id, in which object is located + * @param idx_type + * IN: Type of index by which objects are ordered + * @param order + * IN: Order of iteration within index + * @param n + * IN: Object to open + * @param lapl_id + * IN: Access property list identifier for the link pointing to the object (Not currently used; + * pass as H5P_DEFAULT.) + * + * @return object information + * + * @exception HDF5LibraryException + * Error from the HDF5 Library. + * @exception NullPointerException + * name is null. + **/ + public static H5O_native_info_t H5Oget_native_info_by_idx(long loc_id, String group_name, int idx_type, + int order, long n, long lapl_id) + throws HDF5LibraryException, NullPointerException + { + return H5Oget_native_info_by_idx(loc_id, group_name, idx_type, order, n, + HDF5Constants.H5O_NATIVE_INFO_ALL, lapl_id); + } + + /** + * @ingroup JH5O + * + * H5Oget_native_info_by_idx retrieves the native HDF5-specific metadata for an HDF5 object, identifying + * the object by an index position. Native HDF5-specific metadata includes things like object header + * information and object storage layout information. 
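// Illustrative sketch: retrieving native (object header / storage layout)
// metadata with the H5Oget_native_info* wrappers above; the path is hypothetical
// and hdf.hdf5lib.structs.H5O_native_info_t is assumed to be imported.
static void queryNativeInfo(long fid) throws Exception {
    H5O_native_info_t ninfo =
        H5.H5Oget_native_info_by_name(fid, "/g1/dset", HDF5Constants.H5P_DEFAULT);
    // ninfo now carries the object's header and storage layout details
}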
+ * + * @param loc_id + * IN: File or group identifier + * @param group_name + * IN: Name of group, relative to loc_id, in which object is located + * @param idx_type + * IN: Type of index by which objects are ordered + * @param order + * IN: Order of iteration within index + * @param n + * IN: Object to open + * @param fields + * IN: Object fields to select + * @param lapl_id + * IN: Access property list identifier for the link pointing to the object (Not currently used; + * pass as H5P_DEFAULT.) + * + * @return object information + * + * @exception HDF5LibraryException + * Error from the HDF5 Library. + * @exception NullPointerException + * name is null. + **/ + public synchronized static native H5O_native_info_t H5Oget_native_info_by_idx( + long loc_id, String group_name, int idx_type, int order, long n, int fields, long lapl_id) + throws HDF5LibraryException, NullPointerException; + + // /////// unimplemented //////// + // herr_t H5Otoken_cmp(hid_t loc_id, const H5O_token_t *token1, const H5O_token_t *token2, + // int *cmp_value); + // herr_t H5Otoken_to_str(hid_t loc_id, const H5O_token_t *token, char **token_str); + // herr_t H5Otoken_from_str(hid_t loc_id, const char *token_str, H5O_token_t *token); + + // //////////////////////////////////////////////////////////// + // // + // H5P: Property List Interface Functions // + // // + // //////////////////////////////////////////////////////////// + + // /////// Generic property list routines /////// + + /** + * @ingroup JH5P + * + * H5Pget_class_name retrieves the name of a generic property list class + * + * @param plid + * IN: Identifier of property object to query + * @return name of a property list if successful; null if failed + * + * @exception HDF5LibraryException + * Error from the HDF5 Library. + */ + public synchronized static native String H5Pget_class_name(long plid) throws HDF5LibraryException; + + /** + * @ingroup JH5P + * + * H5Pcreate creates a new property as an instance of some property list class. + * + * @param type + * IN: The type of property list to create. + * + * @return a property list identifier (plist) if successful; otherwise Fail (-1). + * + * @exception HDF5LibraryException + * Error from the HDF5 Library. + **/ + public static long H5Pcreate(long type) throws HDF5LibraryException + { + long id = _H5Pcreate(type); if (id > 0) { log.trace("OPEN_IDS: H5Pcreate add {}", id); OPEN_IDS.add(id); @@ -7113,6 +7707,8 @@ public class H5 implements java.io.Serializable { private synchronized static native long _H5Pcreate(long type) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget retrieves a copy of the value for a property in a property list (support integer only) * * @param plid @@ -7122,11 +7718,13 @@ public class H5 implements java.io.Serializable { * @return value for a property if successful; a negative value if failed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. */ public synchronized static native int H5Pget(long plid, String name) throws HDF5LibraryException; /** + * @ingroup JH5P + * * Sets a property list value (support integer only) * * @param plid @@ -7138,12 +7736,14 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; a negative value if failed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
*/ public synchronized static native int H5Pset(long plid, String name, int value) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pexist determines whether a property exists within a property list or class * * @param plid @@ -7154,11 +7754,13 @@ public class H5 implements java.io.Serializable { * exist; * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. */ public synchronized static native boolean H5Pexist(long plid, String name) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_size retrieves the size of a property's value in bytes * * @param plid @@ -7168,11 +7770,13 @@ public class H5 implements java.io.Serializable { * @return size of a property's value if successful; a negative value if failed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. */ public synchronized static native long H5Pget_size(long plid, String name) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_nprops retrieves the number of properties in a property list or class * * @param plid @@ -7180,11 +7784,13 @@ public class H5 implements java.io.Serializable { * @return number of properties if successful; a negative value if failed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. */ public synchronized static native long H5Pget_nprops(long plid) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_class returns the property list class for the property list identified by the plist parameter. * * @param plist @@ -7192,11 +7798,13 @@ public class H5 implements java.io.Serializable { * @return a property list class if successful. Otherwise returns H5P_ROOT (-1). * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Pget_class(long plist) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_class_parent retrieves an identifier for the parent class of a property class * * @param plid @@ -7204,11 +7812,13 @@ public class H5 implements java.io.Serializable { * @return a valid parent class object identifier if successful; a negative value if failed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. */ public synchronized static native long H5Pget_class_parent(long plid) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pequal determines if two property lists or classes are equal * * @param plid1 @@ -7219,11 +7829,13 @@ public class H5 implements java.io.Serializable { * @return positive value if equal; zero if unequal, a negative value if failed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. */ public synchronized static native int H5Pequal(long plid1, long plid2) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pequal determines if two property lists or classes are equal * * @param plid1 @@ -7234,7 +7846,7 @@ public class H5 implements java.io.Serializable { * @return TRUE if equal, FALSE if unequal * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
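// Illustrative sketch: exercising the generic property list routines above;
// H5P_DATASET_CREATE is just one possible property list class to query.
static void inspectPropertyList() throws Exception {
    long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
    long cls  = H5.H5Pget_class(dcpl);
    System.out.println("class name: " + H5.H5Pget_class_name(cls));
    System.out.println("number of properties: " + H5.H5Pget_nprops(dcpl));
    H5.H5Pclose_class(cls);
    H5.H5Pclose(dcpl);
}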
*/ public static boolean H5P_equal(long plid1, long plid2) throws HDF5LibraryException { @@ -7244,6 +7856,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5P + * * H5Pisa_class checks to determine whether a property list is a member of the specified class * * @param plist @@ -7253,11 +7867,13 @@ public class H5 implements java.io.Serializable { * @return a positive value if equal; zero if unequal; a negative value if failed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. */ public synchronized static native int H5Pisa_class(long plist, long pclass) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pcopy_prop copies a property from one property list or class to another * * @param dst_id @@ -7269,12 +7885,14 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; a negative value if failed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. */ public synchronized static native int H5Pcopy_prop(long dst_id, long src_id, String name) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Premove removes a property from a property list * * @param plid @@ -7284,11 +7902,13 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; a negative value if failed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. */ public synchronized static native int H5Premove(long plid, String name) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Punregister removes a property from a property list class * * @param plid @@ -7298,11 +7918,13 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; a negative value if failed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. */ public synchronized static native int H5Punregister(long plid, String name) throws HDF5LibraryException; /** + * @ingroup JH5P + * * Closes an existing property list class * * @param plid @@ -7310,7 +7932,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; a negative value if failed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. */ public static int H5Pclose_class(long plid) throws HDF5LibraryException { @@ -7326,6 +7948,8 @@ public class H5 implements java.io.Serializable { private synchronized static native int _H5Pclose_class(long plid) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pclose terminates access to a property list. * * @param plist @@ -7333,7 +7957,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static int H5Pclose(long plist) throws HDF5LibraryException { @@ -7349,6 +7973,8 @@ public class H5 implements java.io.Serializable { private synchronized static native int _H5Pclose(long plist) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pcopy copies an existing property list to create a new property list. * * @param plist @@ -7357,7 +7983,7 @@ public class H5 implements java.io.Serializable { * @return a property list identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
**/ public static long H5Pcopy(long plist) throws HDF5LibraryException { @@ -7394,6 +8020,8 @@ public class H5 implements java.io.Serializable { // typedef herr_t (*H5P_iterate_t)(hid_t id, const char *name, void *iter_data); /** + * @ingroup JH5P + * * H5Pcreate_class_nocb creates an new property class with no callback functions. * * @param parent_class @@ -7404,7 +8032,7 @@ public class H5 implements java.io.Serializable { * @return a property list identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static long H5Pcreate_class_nocb(long parent_class, String name) throws HDF5LibraryException { @@ -7440,6 +8068,8 @@ public class H5 implements java.io.Serializable { // H5P_cls_close_func_t close_data) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pregister2_nocb registers a property list with no callback functions. * * @param plist_class @@ -7452,7 +8082,7 @@ public class H5 implements java.io.Serializable { * IN: Default value of the property * * @exception HDF5LibraryException - * - Error from the HDF-5 Library. + * - Error from the HDF5 Library. **/ public synchronized static native void H5Pregister2_nocb(long plist_class, String name, long size, byte[] def_value) throws HDF5LibraryException; @@ -7463,6 +8093,8 @@ public class H5 implements java.io.Serializable { // H5P_prp_compare_func_cb prp_cmp, H5P_prp_close_func_cb prp_close) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pinsert2_nocb inserts a property list with no callback functions. * * @param plist @@ -7475,7 +8107,7 @@ public class H5 implements java.io.Serializable { * IN: Default value of the property * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Pinsert2_nocb(long plist, String name, long size, byte[] value) throws HDF5LibraryException; @@ -7486,6 +8118,8 @@ public class H5 implements java.io.Serializable { // H5P_prp_close_func_cb prp_close) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Piterate iterates over the properties in a property list or class * * @param plist @@ -7501,7 +8135,7 @@ public class H5 implements java.io.Serializable { * zero if all properties have been processed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * size is null. * @@ -7512,6 +8146,8 @@ public class H5 implements java.io.Serializable { // /////// Object creation property list (OCPL) routines /////// /** + * @ingroup JH5P + * * H5Pget_attr_phase_change retrieves attribute storage phase change thresholds. * * @param ocpl_id @@ -7527,7 +8163,7 @@ public class H5 implements java.io.Serializable { * @return Returns a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * size is null. * @@ -7536,6 +8172,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_attr_phase_change sets threshold values for attribute storage on an object. 
These * thresholds determine the point at which attribute storage changes * from compact storage (i.e., storage in the object header) @@ -7549,7 +8187,7 @@ public class H5 implements java.io.Serializable { * IN: Minimum number of attributes to be stored in dense storage (Default: 6) * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native void H5Pset_attr_phase_change(long ocpl_id, int max_compact, @@ -7557,6 +8195,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_attr_creation_order retrieves the settings for tracking and indexing attribute creation order on * an object. * @@ -7566,13 +8206,15 @@ public class H5 implements java.io.Serializable { * @return Flags specifying whether to track and index attribute creation order * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pget_attr_creation_order(long ocpl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_attr_creation_order sets flags specifying whether to track and index attribute creation order on * an object. * @@ -7584,13 +8226,15 @@ public class H5 implements java.io.Serializable { * @return Returns a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_attr_creation_order(long ocpl_id, int crt_order_flags) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_obj_track_times queries the object creation property list, ocpl_id, to determine whether object * times are being recorded. * @@ -7600,13 +8244,15 @@ public class H5 implements java.io.Serializable { * @return TRUE or FALSE, specifying whether object times are being recorded * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native boolean H5Pget_obj_track_times(long ocpl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_obj_track_times sets a property in the object creation property list, ocpl_id, that governs the * recording of times associated with an object. * @@ -7617,13 +8263,15 @@ public class H5 implements java.io.Serializable { * IN: TRUE or FALSE, specifying whether object times are to be tracked * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native void H5Pset_obj_track_times(long ocpl_id, boolean track_times) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pmodify_filter modifies the specified FILTER in the transient or permanent output filter pipeline * depending on whether PLIST is a dataset creation or dataset * transfer property list. The FLAGS argument specifies certain @@ -7663,7 +8311,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name or an array is null. * @@ -7673,6 +8321,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_filter adds the specified filter and corresponding properties to the end of an output filter * pipeline. 
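// Illustrative sketch: configuring attribute storage on an object creation
// property list with the OCPL routines above; the thresholds are arbitrary
// example values and the H5P_CRT_ORDER_* constants are assumed to be available
// in HDF5Constants.
static long makeOcpl() throws Exception {
    long ocpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
    H5.H5Pset_attr_phase_change(ocpl, 12, 8); // switch to dense storage above 12 attributes
    H5.H5Pset_attr_creation_order(ocpl, HDF5Constants.H5P_CRT_ORDER_TRACKED
                                            | HDF5Constants.H5P_CRT_ORDER_INDEXED);
    H5.H5Pset_obj_track_times(ocpl, true);
    return ocpl;
}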
* @@ -7690,12 +8340,14 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_filter(long plist, int filter, int flags, long cd_nelmts, int[] cd_values) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_nfilters returns the number of filters defined in the filter pipeline associated with the * property list plist. * @@ -7705,11 +8357,13 @@ public class H5 implements java.io.Serializable { * @return the number of filters in the pipeline if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pget_nfilters(long plist) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_filter returns information about a filter, specified by its filter number, in a filter pipeline, * specified by the property list with which it is associated. * @@ -7738,7 +8392,7 @@ public class H5 implements java.io.Serializable { * @exception ArrayStoreException * Fatal error on Copyback * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name or an array is null. * @@ -7752,6 +8406,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5P + * * H5Pget_filter2 returns information about a filter, specified by its filter number, in a filter * pipeline, specified by the property list with which it is associated. * @@ -7766,6 +8422,8 @@ public class H5 implements java.io.Serializable { NullPointerException; /** + * @ingroup JH5P + * * H5Pget_filter_by_id returns information about the filter specified in filter_id, a filter identifier. * plist_id must be a dataset or group creation property list and filter_id must be in the associated * filter pipeline. The filter_id and flags parameters are used in the same manner as described in the @@ -7798,7 +8456,7 @@ public class H5 implements java.io.Serializable { * @return the filter identification number if successful. Otherwise returns H5Z_FILTER_ERROR (-1). * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception ArrayIndexOutOfBoundsException * Fatal error on Copyback * @exception ArrayStoreException @@ -7816,6 +8474,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5P + * * H5Pget_filter_by_id2 returns information about a filter, specified by its filter id, in a filter * pipeline, specified by the property list with which it is associated. * @@ -7839,7 +8499,7 @@ public class H5 implements java.io.Serializable { * @return the filter identification number if successful. Otherwise returns H5Z_FILTER_ERROR (-1). * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name or an array is null. * @@ -7850,6 +8510,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pall_filters_avail query to verify that all the filters set * in the dataset creation property list are available currently. * @@ -7861,12 +8523,14 @@ public class H5 implements java.io.Serializable { * FALSE if one or more filters not currently available. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. 
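// Illustrative sketch: adding a filter to a dataset creation property list and
// inspecting the pipeline with H5Pset_filter/H5Pget_nfilters above; deflate
// level 6 is an arbitrary example value.
static void addDeflateFilter(long dcpl) throws Exception {
    int[] cd_values = {6}; // deflate compression level
    H5.H5Pset_filter(dcpl, HDF5Constants.H5Z_FILTER_DEFLATE,
                     HDF5Constants.H5Z_FLAG_OPTIONAL, 1, cd_values);
    System.out.println("filters in pipeline: " + H5.H5Pget_nfilters(dcpl));
}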
+ * Error from the HDF5 Library. **/ public synchronized static native boolean H5Pall_filters_avail(long dcpl_id) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Premove_filter deletes a filter from the dataset creation property list; * deletes all filters if filter is H5Z_FILTER_NONE * @@ -7878,12 +8542,14 @@ public class H5 implements java.io.Serializable { * @return a non-negative value and the size of the user block; if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Premove_filter(long obj_id, long filter) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_deflate sets the compression method for a dataset. * * @param plist @@ -7894,11 +8560,13 @@ public class H5 implements java.io.Serializable { * @return non-negative if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_deflate(long plist, int level) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_fletcher32 sets Fletcher32 checksum of EDC for a dataset creation * property list or group creation property list. * @@ -7908,7 +8576,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value and the size of the user block; if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_fletcher32(long plist) throws HDF5LibraryException, NullPointerException; @@ -7916,6 +8584,8 @@ public class H5 implements java.io.Serializable { // /////// File creation property list (FCPL) routines /////// /** + * @ingroup JH5P + * * H5Pget_userblock retrieves the size of a user block in a file creation property list. * * @param plist @@ -7926,7 +8596,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value and the size of the user block; if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * size is null. **/ @@ -7934,6 +8604,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_userblock sets the user block size of a file creation property list. * * @param plist @@ -7944,11 +8616,13 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_userblock(long plist, long size) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_sizes retrieves the size of the offsets and lengths used in an HDF5 file. This function is only * valid for file creation property lists. * @@ -7964,7 +8638,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value with the sizes initialized; if successful; * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * size is null. 
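// Illustrative sketch: the convenience setters above configure common filters
// directly; level 9 is an arbitrary example value.
static void addChecksumAndCompression(long dcpl) throws Exception {
    H5.H5Pset_fletcher32(dcpl); // Fletcher32 checksum (EDC)
    H5.H5Pset_deflate(dcpl, 9); // gzip/deflate compression
}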
* @exception IllegalArgumentException @@ -7974,6 +8648,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pset_sizes sets the byte size of the offsets and lengths used to address objects in an HDF5 file. * * @param plist @@ -7986,12 +8662,14 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_sizes(long plist, int sizeof_addr, int sizeof_size) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_sym_k retrieves the size of the symbol table B-tree 1/2 rank and the symbol table leaf node 1/2 * size. * @@ -8008,7 +8686,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * size is null. * @exception IllegalArgumentException @@ -8018,6 +8696,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pset_sym_k sets the size of parameters used to control the symbol table nodes. * * @param plist @@ -8030,12 +8710,14 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_sym_k(long plist, int ik, int lk) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_istore_k queries the 1/2 rank of an indexed storage B-tree. * * @param plist @@ -8046,7 +8728,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * ik array is null. **/ @@ -8054,6 +8736,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_istore_k sets the size of the parameter used to control the B-trees for indexing chunked * datasets. * @@ -8065,11 +8749,13 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_istore_k(long plist, int ik) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_shared_mesg_nindexes retrieves number of shared object header message indexes in file creation * property list. * @@ -8080,13 +8766,15 @@ public class H5 implements java.io.Serializable { * this property list * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pget_shared_mesg_nindexes(long fcpl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_shared_mesg_nindexes sets the number of shared object header message indexes in the specified * file creation property list. * @@ -8099,7 +8787,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. 
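// Illustrative sketch: tuning a file creation property list with the FCPL
// routines above; the numbers are arbitrary examples (the user block size must
// be 0 or a power of two of at least 512 bytes).
static long makeFcpl() throws Exception {
    long fcpl = H5.H5Pcreate(HDF5Constants.H5P_FILE_CREATE);
    H5.H5Pset_userblock(fcpl, 512);
    H5.H5Pset_sym_k(fcpl, 32, 8); // symbol table B-tree 1/2 rank and leaf 1/2 size
    H5.H5Pset_istore_k(fcpl, 64); // chunked-dataset index B-tree 1/2 rank
    return fcpl;
}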
* * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Invalid value of nindexes * @@ -8108,6 +8796,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_shared_mesg_index Retrieves the configuration settings for a shared message index. * * @param fcpl_id @@ -8125,7 +8815,7 @@ public class H5 implements java.io.Serializable { * @return Returns a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * mesg_info is null. * @exception IllegalArgumentException @@ -8137,6 +8827,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pset_shared_mesg_index Configures the specified shared object header message index * * @param fcpl_id @@ -8151,7 +8843,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Invalid value of nindexes * @@ -8161,6 +8853,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_shared_mesg_phase_change retrieves shared object header message phase change information. * * @param fcpl_id @@ -8177,7 +8871,7 @@ public class H5 implements java.io.Serializable { * @return Returns a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * size is null. * @@ -8186,6 +8880,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_shared_mesg_phase_change sets shared object header message storage phase change thresholds. * * @param fcpl_id @@ -8200,7 +8896,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Invalid values of max_list and min_btree. * @@ -8210,6 +8906,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pset_file_space_strategy sets the file space management strategy for the file associated with fcpl_id * to strategy. There are four strategies that applications can select and they are described in the * Parameters section. @@ -8236,7 +8934,7 @@ public class H5 implements java.io.Serializable { * is not to be modified. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Invalid values of max_list and min_btree. * @@ -8246,6 +8944,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_file_space_strategy provides the means for applications to manage the HDF5 file's file space * strategy for their specific needs. 
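// Illustrative sketch: selecting a file space management strategy on a file
// creation property list. The parameter list (strategy, persist flag, free-space
// section threshold) and the H5F_FSPACE_STRATEGY_PAGE constant are assumptions
// based on the descriptions above; the page size is an arbitrary example value.
static void usePagedAggregation(long fcpl) throws Exception {
    H5.H5Pset_file_space_strategy(fcpl, HDF5Constants.H5F_FSPACE_STRATEGY_PAGE, true, 1);
    H5.H5Pset_file_space_page_size(fcpl, 8192); // bytes per page for paged aggregation
}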
* @@ -8259,7 +8959,7 @@ public class H5 implements java.io.Serializable { * @return the current free-space strategy. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Invalid values of max_list and min_btree. * @@ -8269,6 +8969,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_file_space_strategy_persist provides the means for applications to manage the HDF5 file's file * space strategy for their specific needs. * @@ -8278,7 +8980,7 @@ public class H5 implements java.io.Serializable { * @return the current free-space persistence. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Invalid values of max_list and min_btree. * @@ -8287,6 +8989,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_file_space_strategy_threshold provides the means for applications to manage the HDF5 file's file * space strategy for their specific needs. * @@ -8296,7 +9000,7 @@ public class H5 implements java.io.Serializable { * @return the current free-space section threshold. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Invalid values of max_list and min_btree. * @@ -8305,6 +9009,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pset_file_space_page_size retrieves the file space page size for aggregating small metadata or raw * data. * @@ -8315,7 +9021,7 @@ public class H5 implements java.io.Serializable { * * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Invalid values of max_list and min_btree. * @@ -8324,6 +9030,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_file_space_page_size Sets the file space page size for paged aggregation. * * @param fcpl_id @@ -8332,7 +9040,7 @@ public class H5 implements java.io.Serializable { * @return the current file space page size. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Invalid values of max_list and min_btree. * @@ -8343,6 +9051,8 @@ public class H5 implements java.io.Serializable { // /////// File access property list (FAPL) routines /////// /** + * @ingroup JH5P + * * H5Pget_alignment retrieves the current settings for alignment properties from a file access property * list. * @@ -8358,7 +9068,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * alignment array is null. 
* @exception IllegalArgumentException @@ -8368,6 +9078,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pset_alignment sets the alignment properties of a file access property list so that any file object * >= THRESHOLD bytes will be aligned on an address which is a multiple of ALIGNMENT. * @@ -8381,12 +9093,14 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_alignment(long plist, long threshold, long alignment) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_driver returns the identifier of the low-level file driver associated with the file access * property list or data transfer property list plid. * @@ -8395,11 +9109,13 @@ public class H5 implements java.io.Serializable { * @return a valid low-level driver identifier if successful; a negative value if failed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. */ public synchronized static native long H5Pget_driver(long plid) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_family_offset gets offset for family driver. * * @param fapl_id @@ -8408,12 +9124,14 @@ public class H5 implements java.io.Serializable { * @return the offset. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native long H5Pget_family_offset(long fapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_family_offset sets the offset for family driver. * * @param fapl_id @@ -8424,13 +9142,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_family_offset(long fapl_id, long offset) throws HDF5LibraryException; /** + * @ingroup JH5P + * * Retrieves the maximum possible number of elements in the meta data cache and the maximum possible * number of bytes and the RDCC_W0 value in the raw data chunk cache. * @@ -8448,7 +9168,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an array is null. **/ @@ -8457,6 +9177,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_cache sets the number of elements (objects) in the meta data cache and the total number of bytes * in the raw data chunk cache. * @@ -8474,13 +9196,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_cache(long plist, int mdc_nelmts, long rdcc_nelmts, long rdcc_nbytes, double rdcc_w0) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_mdc_config gets the initial metadata cache configuration contained in a file access property * list. This configuration is used when the file is opened. 
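// Illustrative sketch: tuning a file access property list with the FAPL routines
// above; the 1 MiB threshold, 4 KiB alignment, and chunk cache numbers are
// arbitrary example values.
static long makeFapl() throws Exception {
    long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
    H5.H5Pset_alignment(fapl, 1024 * 1024, 4096);          // align objects >= 1 MiB to 4 KiB boundaries
    H5.H5Pset_cache(fapl, 0, 521, 16 * 1024 * 1024, 0.75); // raw data chunk cache: slots, bytes, w0
    return fapl;
}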
* @@ -8490,12 +9214,14 @@ public class H5 implements java.io.Serializable { * @return A buffer(H5AC_cache_config_t) for the current metadata cache configuration information * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native H5AC_cache_config_t H5Pget_mdc_config(long plist_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_mdc_config sets the initial metadata cache configuration contained in a file access property * list and loads it into the instance of H5AC_cache_config_t pointed to by the config_ptr parameter. This * configuration is used when the file is opened. @@ -8506,12 +9232,14 @@ public class H5 implements java.io.Serializable { * IN: H5AC_cache_config_t, the initial metadata cache configuration. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Pset_mdc_config(long plist_id, H5AC_cache_config_t config_ptr) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_gc_references Returns the current setting for the garbage collection references property from a * file access property list. * @@ -8521,11 +9249,13 @@ public class H5 implements java.io.Serializable { * @return GC is on (true) or off (false) * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native boolean H5Pget_gc_references(long fapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_gc_references Sets the flag for garbage collecting references for the file. Default value for * garbage collecting references is off. * @@ -8537,12 +9267,14 @@ public class H5 implements java.io.Serializable { * @return non-negative if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_gc_references(long fapl_id, boolean gc_ref) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_fclose_degree returns the degree for the file close behavior for a file access * property list. * @@ -8552,12 +9284,14 @@ public class H5 implements java.io.Serializable { * @return the degree for the file close behavior * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pget_fclose_degree(long fapl_id) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_fclose_degree sets the degree for the file close behavior. * * @param fapl_id @@ -8568,12 +9302,14 @@ public class H5 implements java.io.Serializable { * @return non-negative if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_fclose_degree(long fapl_id, int degree) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pget_meta_block_size the current metadata block size setting. * * @param fapl_id @@ -8582,12 +9318,14 @@ public class H5 implements java.io.Serializable { * @return the minimum size, in bytes, of metadata block allocations. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
* **/ public synchronized static native long H5Pget_meta_block_size(long fapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_meta_block_size sets the minimum metadata block size. * * @param fapl_id @@ -8596,13 +9334,15 @@ public class H5 implements java.io.Serializable { * IN: Minimum size, in bytes, of metadata block allocations. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native void H5Pset_meta_block_size(long fapl_id, long size) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_sieve_buf_size retrieves the current settings for the data sieve buffer size * property from a file access property list. * @@ -8612,11 +9352,13 @@ public class H5 implements java.io.Serializable { * @return a non-negative value and the size of the user block; if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Pget_sieve_buf_size(long fapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_sieve_buf_size Sets the maximum size of the data seive buffer used for file * drivers which are capable of using data sieving. The data sieve * buffer is used when performing I/O on datasets in the file. Using a @@ -8634,12 +9376,14 @@ public class H5 implements java.io.Serializable { * IN: maximum size of the data seive buffer. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Pset_sieve_buf_size(long fapl_id, long size) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_small_data_block_size retrieves the size of a block of small data in a file creation property * list. * @@ -8649,12 +9393,14 @@ public class H5 implements java.io.Serializable { * @return a non-negative value and the size of the user block; if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Pget_small_data_block_size(long plist) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_small_data_block_size reserves blocks of size bytes for the contiguous storage of the raw data * portion of small datasets. * @@ -8666,12 +9412,14 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_small_data_block_size(long plist, long size) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_libver_bounds retrieves the lower and upper bounds on the HDF5 Library versions that indirectly * determine the object formats versions used when creating objects in the file. * @@ -8688,7 +9436,7 @@ public class H5 implements java.io.Serializable { * @return Returns a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * size is null. 
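// Illustrative sketch: the block-size related FAPL settings documented above;
// all sizes are arbitrary example values in bytes.
static void tuneBlockSizes(long fapl) throws Exception {
    H5.H5Pset_meta_block_size(fapl, 2048);       // minimum metadata block allocation
    H5.H5Pset_sieve_buf_size(fapl, 256 * 1024);  // data sieve buffer for partial I/O
    H5.H5Pset_small_data_block_size(fapl, 4096); // contiguous block reserved for small raw data
}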
* @@ -8697,6 +9445,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_libver_bounds Sets bounds on library versions, and indirectly format versions, to be used when * creating objects * @@ -8711,7 +9461,7 @@ public class H5 implements java.io.Serializable { * @return Returns a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Argument is Illegal * @@ -8720,6 +9470,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_elink_file_cache_size retrieves the size of the external link open file cache. * * @param fapl_id @@ -8728,13 +9480,15 @@ public class H5 implements java.io.Serializable { * @return External link open file cache size in number of files. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pget_elink_file_cache_size(long fapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_elink_file_cache_size sets the number of files that can be held open in an external link open * file cache. * @@ -8744,13 +9498,15 @@ public class H5 implements java.io.Serializable { * IN: External link open file cache size in number of files. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native void H5Pset_elink_file_cache_size(long fapl_id, int efc_size) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_mdc_log_options sets metadata cache logging options. * * @param fapl_id @@ -8763,7 +9519,7 @@ public class H5 implements java.io.Serializable { * IN: Whether the logging begins as soon as the file is opened or created. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * location is null. * @@ -8773,6 +9529,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pget_mdc_log_options gets metadata cache logging options. * * @param fapl_id @@ -8786,13 +9544,15 @@ public class H5 implements java.io.Serializable { * @return the location of log in UTF-8/ASCII (file path/name) (On Windows, this must be ASCII). * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native String H5Pget_mdc_log_options(long fapl_id, boolean[] mdc_log_options) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_metadata_read_attempts retrieves the number of read attempts that is set in the file access * property list plist_id. * @@ -8802,13 +9562,15 @@ public class H5 implements java.io.Serializable { * @return The number of read attempts. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native long H5Pget_metadata_read_attempts(long plist_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_metadata_read_attempts sets the number of reads that the library will try when reading * checksummed metadata in an HDF5 file opened with SWMR access. 
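A minimal sketch of a writer-side FAPL using the library-version bounds and SWMR read-attempt routines above; the three-argument H5Pset_libver_bounds form and the H5F_LIBVER_LATEST constant are assumptions based on the usual hdf.hdf5lib API, and the class name is hypothetical.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class SwmrFaplSketch {
    public static long swmrWriterFapl() throws Exception {
        long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
        // Use the latest object-format versions, as required for SWMR writing.
        H5.H5Pset_libver_bounds(fapl, HDF5Constants.H5F_LIBVER_LATEST,
                                HDF5Constants.H5F_LIBVER_LATEST);
        // Retry checksummed metadata reads up to 100 times before failing.
        H5.H5Pset_metadata_read_attempts(fapl, 100);
        // Keep up to 8 externally linked files open in the cache.
        H5.H5Pset_elink_file_cache_size(fapl, 8);
        return fapl;
    }
}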
When reading such metadata, the library * will compare the checksum computed for the metadata just read with the checksum stored within the piece @@ -8823,13 +9585,15 @@ public class H5 implements java.io.Serializable { * IN: The number of read attempts which is a value greater than 0. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native void H5Pset_metadata_read_attempts(long plist_id, long attempts) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_evict_on_close retrieves the file access property list setting that determines whether an HDF5 * object will be evicted from the library's metadata cache when it is closed. * @@ -8839,12 +9603,14 @@ public class H5 implements java.io.Serializable { * @return indication if the object will be evicted on close. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native boolean H5Pget_evict_on_close(long fapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_evict_on_close controls the library's behavior of evicting metadata associated with a closed * object. * @@ -8854,13 +9620,15 @@ public class H5 implements java.io.Serializable { * IN: Whether the HDF5 object should be evicted on close. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native void H5Pset_evict_on_close(long fapl_id, boolean evict_on_close) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_use_file_locking retrieves whether we are using file locking. * * @param fapl_id @@ -8869,13 +9637,15 @@ public class H5 implements java.io.Serializable { * @return indication if file locking is used. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native boolean H5Pget_use_file_locking(long fapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_use_file_locking retrieves whether we ignore file locks when they are disabled. * * @param fapl_id @@ -8884,13 +9654,15 @@ public class H5 implements java.io.Serializable { * @return indication if file locking is ignored. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native boolean H5Pget_ignore_disabled_file_locking(long fapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_file_locking sets parameters related to file locking. * * @param fapl_id @@ -8904,7 +9676,7 @@ public class H5 implements java.io.Serializable { * IN: Whether file locking will be ignored when disabled on a file system (useful for Lustre). * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native void H5Pset_file_locking(long fapl_id, boolean use_file_locking, @@ -8919,6 +9691,8 @@ public class H5 implements java.io.Serializable { // Dataset creation property list (DCPL) routines // /** + * @ingroup JH5P + * * H5Pget_layout returns the layout of the raw data for a dataset. * * @param plist @@ -8928,11 +9702,13 @@ public class H5 implements java.io.Serializable { * H5D_LAYOUT_ERROR (-1). * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
**/ public synchronized static native int H5Pget_layout(long plist) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_layout sets the type of storage used store the raw data for a dataset. * * @param plist @@ -8943,11 +9719,13 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_layout(long plist, int layout) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_chunk retrieves the size of chunks for the raw data of a chunked layout dataset. * * @param plist @@ -8960,7 +9738,7 @@ public class H5 implements java.io.Serializable { * @return chunk dimensionality successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * dims array is null. * @exception IllegalArgumentException @@ -8970,6 +9748,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pset_chunk sets the size of the chunks used to store a chunked layout dataset. * * @param plist @@ -8982,7 +9762,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * dims array is null. * @exception IllegalArgumentException @@ -8992,6 +9772,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pset_chunk sets the size of the chunks used to store a chunked layout dataset. * * @param plist @@ -9004,7 +9786,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5Exception - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * dims array is null. * @exception IllegalArgumentException @@ -9028,6 +9810,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5P + * * H5Pset_virtual maps elements of the virtual dataset (VDS) described by the * virtual dataspace identifier vspace_id to the elements of the source dataset * described by the source dataset dataspace identifier src_space_id. The source @@ -9051,7 +9835,7 @@ public class H5 implements java.io.Serializable { * selection. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an name string is null. * @exception IllegalArgumentException @@ -9062,6 +9846,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_virtual_count gets the number of mappings for a virtual dataset that has the creation property * list specified by dcpl_id. * @@ -9071,7 +9857,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative number of mappings if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
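An illustrative sketch of the chunked-layout DCPL routines above; the dataset path, class name, and the H5Screate_simple/H5Dcreate calls are assumptions drawn from the wider hdf.hdf5lib API, not from this patch.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class ChunkedDcplSketch {
    public static long create2dChunkedDataset(long file_id) throws Exception {
        long[] dims   = {1024, 1024};
        long[] chunks = {128, 128};
        long space = H5.H5Screate_simple(2, dims, dims);
        long dcpl  = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
        // Chunked layout with 128 x 128 element chunks.
        H5.H5Pset_chunk(dcpl, 2, chunks);
        long dset = H5.H5Dcreate(file_id, "/chunked", HDF5Constants.H5T_NATIVE_INT,
                                 space, HDF5Constants.H5P_DEFAULT, dcpl,
                                 HDF5Constants.H5P_DEFAULT);
        H5.H5Pclose(dcpl);
        H5.H5Sclose(space);
        return dset;
    }
}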
* @exception IllegalArgumentException * An id is <=0 **/ @@ -9079,6 +9865,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_virtual_vspace takes the dataset creation property list for the virtual dataset, dcpl_id, and * the mapping index, index, and returns a dataspace identifier for the selection within the virtual * dataset used in the mapping. @@ -9091,7 +9879,7 @@ public class H5 implements java.io.Serializable { * @return a valid dataspace identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * An id is <=0 **/ @@ -9099,6 +9887,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_virtual_srcspace takes the dataset creation property list for the virtual dataset, dcpl_id, and * the mapping index, index, and returns a dataspace identifier for the selection within the source * dataset used in the mapping. @@ -9111,7 +9901,7 @@ public class H5 implements java.io.Serializable { * @return a valid dataspace identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * An id is <=0 **/ @@ -9119,6 +9909,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_virtual_filename takes the dataset creation property list for the virtual dataset, dcpl_id, the * mapping index, index, the size of the filename for a source dataset, size, and retrieves the name of * the file for a source dataset used in the mapping. @@ -9131,7 +9923,7 @@ public class H5 implements java.io.Serializable { * @return the name of the file containing the source dataset if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * An id is <=0 **/ @@ -9139,6 +9931,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_virtual_dsetname takes the dataset creation property list for the virtual dataset, dcpl_id, the * mapping index, index, the size of the dataset name for a source dataset, size, and retrieves the name * of the source dataset used in the mapping. @@ -9151,7 +9945,7 @@ public class H5 implements java.io.Serializable { * @return the name of the source dataset if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * An id is <=0 **/ @@ -9168,7 +9962,7 @@ public class H5 implements java.io.Serializable { // * @return VDS link open file cache size in number of files. // * // * @exception HDF5LibraryException - // * Error from the HDF-5 Library. + // * Error from the HDF5 Library. // * // **/ // public synchronized static native int H5Pget_vds_file_cache_size(long fapl_id) throws @@ -9184,13 +9978,15 @@ public class H5 implements java.io.Serializable { // * IN: VDS link open file cache size in number of files. // * // * @exception HDF5LibraryException - // * Error from the HDF-5 Library. + // * Error from the HDF5 Library. 
// * // **/ // public synchronized static native void H5Pset_vds_file_cache_size(long fapl_id, int efc_size) // throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_external returns information about an external file. * * @param plist @@ -9217,7 +10013,7 @@ public class H5 implements java.io.Serializable { * @exception ArrayStoreException * Fatal error on Copyback * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name or size is null. * @exception IllegalArgumentException @@ -9230,6 +10026,8 @@ public class H5 implements java.io.Serializable { NullPointerException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pset_external adds an external file to the list of external files. * * @param plist @@ -9245,7 +10043,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -9253,6 +10051,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pget_external_count returns the number of external files for the specified dataset. * * @param plist @@ -9261,11 +10061,13 @@ public class H5 implements java.io.Serializable { * @return the number of external files if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pget_external_count(long plist) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_szip Sets up the use of the szip filter. * * @param plist @@ -9278,13 +10080,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_szip(long plist, int options_mask, int pixels_per_block) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_shuffle Sets up the use of the shuffle filter. * * @param plist_id @@ -9293,13 +10097,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_shuffle(long plist_id) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_nbit Sets up the use of the N-Bit filter. * * @param plist_id @@ -9308,12 +10114,14 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_nbit(long plist_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_scaleoffset sets the Scale-Offset filter for a dataset. * * @param plist_id @@ -9326,7 +10134,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
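A short sketch combining the filter routines above on a dataset creation property list; filters apply only to chunked datasets, the class name is hypothetical, and error handling is omitted.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class FilterDcplSketch {
    public static long filteredDcpl() throws Exception {
        long dcpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
        // Filters require a chunked layout.
        H5.H5Pset_chunk(dcpl, 1, new long[] {4096});
        // Byte-shuffle the data before any later filters run.
        H5.H5Pset_shuffle(dcpl);
        // Keep only the significant bits of each element (N-Bit filter).
        H5.H5Pset_nbit(dcpl);
        return dcpl;
    }
}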
* @exception IllegalArgumentException * Invalid arguments * @@ -9335,6 +10143,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_fill_value queries the fill value property of a dataset creation property list. * * @param plist_id @@ -9353,6 +10163,8 @@ public class H5 implements java.io.Serializable { throws HDF5Exception; /** + * @ingroup JH5P + * * H5Pget_fill_value queries the fill value property of a dataset creation property list. * * @param plist_id @@ -9381,6 +10193,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5P + * * H5Pset_fill_value sets the fill value for a dataset creation property list. * * @param plist_id @@ -9399,6 +10213,8 @@ public class H5 implements java.io.Serializable { throws HDF5Exception; /** + * @ingroup JH5P + * * H5Pset_fill_value sets the fill value for a dataset creation property list. * * @param plist_id @@ -9427,6 +10243,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5P + * * H5Pset_fill_value checks if the fill value is defined for a dataset creation property list. * * @param plist_id @@ -9447,6 +10265,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_alloc_time Gets space allocation time for dataset during creation. * * @param plist_id @@ -9457,13 +10277,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pget_alloc_time(long plist_id, int[] alloc_time) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_alloc_time Sets space allocation time for dataset during creation. * * @param plist_id @@ -9474,13 +10296,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_alloc_time(long plist_id, int alloc_time) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_fill_time Gets fill value writing time. * * @param plist_id @@ -9491,13 +10315,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pget_fill_time(long plist_id, int[] fill_time) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_fill_time Sets the fill value writing time. * * @param plist_id @@ -9508,13 +10334,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_fill_time(long plist_id, int fill_time) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_chunk_opts Sets the edge chunk option in a dataset creation property list. 
* * @param dcpl_id @@ -9525,12 +10353,14 @@ public class H5 implements java.io.Serializable { * 0 - Disables option; partial edge chunks will be compressed. * * @exception HDF5LibraryException - * Error from the HDF-5 Library + * Error from the HDF5 Library **/ public synchronized static native void H5Pset_chunk_opts(long dcpl_id, int opts) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_chunk_opts retrieves the edge chunk option setting stored in the dataset creation property list * * @param dcpl_id @@ -9539,12 +10369,14 @@ public class H5 implements java.io.Serializable { * @return The edge chunk option setting. * * @exception HDF5LibraryException - * Error from the HDF-5 Library + * Error from the HDF5 Library * */ public synchronized static native int H5Pget_chunk_opts(long dcpl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_dset_no_attrs_hint accesses the flag for whether or not datasets created by the given dcpl * will be created with a "minimized" object header. * @@ -9554,12 +10386,14 @@ public class H5 implements java.io.Serializable { * @return true if the given dcpl is set to create minimized dataset object headers, false if not. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native boolean H5Pget_dset_no_attrs_hint(long dcpl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_dset_no_attrs_hint sets the dcpl to minimize (or explicitly to not minimized) dataset object * headers upon creation. * @@ -9570,7 +10404,7 @@ public class H5 implements java.io.Serializable { * IN: the minimize hint setting * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Pset_dset_no_attrs_hint(long dcpl_id, boolean minimize) throws HDF5LibraryException; @@ -9578,6 +10412,8 @@ public class H5 implements java.io.Serializable { // /////// Dataset access property list (DAPL) routines /////// /** + * @ingroup JH5P + * * Retrieves the maximum possible number of elements in the meta data cache and the maximum possible * number of bytes and the RDCC_W0 value in the raw data chunk cache on a per-datset basis. * @@ -9591,7 +10427,7 @@ public class H5 implements java.io.Serializable { * IN/OUT: Preemption policy. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an array is null. **/ @@ -9600,6 +10436,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_chunk_cache sets the number of elements (objects) in the meta data cache and the total number of * bytes in the raw data chunk cache on a per-datset basis. * @@ -9613,13 +10451,15 @@ public class H5 implements java.io.Serializable { * IN: Preemption policy. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Pset_chunk_cache(long dapl_id, long rdcc_nslots, long rdcc_nbytes, double rdcc_w0) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_virtual_view takes the access property list for the virtual dataset, dapl_id, and the flag, * view, and sets the VDS view according to the flag value. * @@ -9629,12 +10469,14 @@ public class H5 implements java.io.Serializable { * IN: Flag specifying the extent of the data to be included in the view. 
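A minimal sketch of the per-dataset chunk cache routine above (DAPL); the slot count, cache size, and preemption policy are example values, and the class name is hypothetical.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class ChunkCacheSketch {
    public static long largeChunkCacheDapl() throws Exception {
        long dapl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
        // 12421 hash slots, a 64 MiB raw chunk cache, and full preemption of
        // fully read or written chunks, for this one dataset only.
        H5.H5Pset_chunk_cache(dapl, 12421, 64L * 1024 * 1024, 1.0);
        return dapl;
    }
}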
* * @exception HDF5LibraryException - * Error from the HDF-5 Library + * Error from the HDF5 Library **/ public synchronized static native void H5Pset_virtual_view(long dapl_id, int view) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_virtual_view takes the virtual dataset access property list, dapl_id, and retrieves the flag, * view, set by the H5Pset_virtual_view call. * @@ -9644,12 +10486,14 @@ public class H5 implements java.io.Serializable { * @return The flag specifying the view of the virtual dataset. * * @exception HDF5LibraryException - * Error from the HDF-5 Library + * Error from the HDF5 Library * */ public synchronized static native int H5Pget_virtual_view(long dapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_virtual_printf_gap sets the access property list for the virtual dataset, dapl_id, to instruct * the library to stop looking for the mapped data stored in the files and/or datasets with the * printf-style names after not finding gap_size files and/or datasets. The found source files and @@ -9662,12 +10506,14 @@ public class H5 implements java.io.Serializable { * the extent of an unlimited virtual dataset with printf-style mappings. * * @exception HDF5LibraryException - * Error from the HDF-5 Library + * Error from the HDF5 Library **/ public synchronized static native void H5Pset_virtual_printf_gap(long dapl_id, long gap_size) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_virtual_printf_gap returns the maximum number of missing printf-style files and/or datasets for * determining the extent of an unlimited virtual dataaset, gap_size, using the access property list for * the virtual dataset, dapl_id. @@ -9679,13 +10525,15 @@ public class H5 implements java.io.Serializable { * the extent of an unlimited virtual dataset with printf-style mappings. * * @exception HDF5LibraryException - * Error from the HDF-5 Library + * Error from the HDF5 Library * */ public synchronized static native long H5Pget_virtual_printf_gap(long dapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_virtual_prefix Retrieves prefix applied to virtual file paths. * * @param dapl_id @@ -9694,12 +10542,14 @@ public class H5 implements java.io.Serializable { * @return the prefix to be applied to virtual file paths. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native String H5Pget_virtual_prefix(long dapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_virtual_prefix Sets prefix to be applied to virtual file paths. * * @param dapl_id @@ -9708,7 +10558,7 @@ public class H5 implements java.io.Serializable { * IN: Prefix to be applied to virtual file paths * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * prefix is null. * @@ -9717,6 +10567,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pget_efile_prefix Retrieves prefix applied to external file paths. * * @param dapl_id @@ -9725,12 +10577,14 @@ public class H5 implements java.io.Serializable { * @return the prefix to be applied to external file paths. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
* **/ public synchronized static native String H5Pget_efile_prefix(long dapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_efile_prefix Sets prefix to be applied to external file paths. * * @param dapl_id @@ -9739,7 +10593,7 @@ public class H5 implements java.io.Serializable { * IN: Prefix to be applied to external file paths * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * prefix is null. * @@ -9756,6 +10610,8 @@ public class H5 implements java.io.Serializable { // /////// Dataset xfer property list (DXPL) routines /////// /** + * @ingroup JH5P + * * H5Pget_data_transform retrieves the data transform expression previously set in the dataset transfer * property list plist_id by H5Pset_data_transform. * @@ -9771,7 +10627,7 @@ public class H5 implements java.io.Serializable { * * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Size is <= 0. * @@ -9781,6 +10637,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pset_data_transform sets a data transform expression * * @param plist_id @@ -9791,7 +10649,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative valule if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * expression is null. * @@ -9800,7 +10658,9 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** - * HH5Pget_buffer gets type conversion and background buffers. Returns buffer size, in bytes, if + * @ingroup JH5P + * + * H5Pget_buffer gets type conversion and background buffers. Returns buffer size, in bytes, if * successful; otherwise 0 on failure. * * @param plist @@ -9813,7 +10673,7 @@ public class H5 implements java.io.Serializable { * @return buffer size, in bytes, if successful; otherwise 0 on failure * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * plist is invalid. **/ @@ -9821,6 +10681,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_buffer_size gets type conversion and background buffer size, in bytes, if successful; * otherwise 0 on failure. * @@ -9830,7 +10692,7 @@ public class H5 implements java.io.Serializable { * @return buffer size, in bytes, if successful; otherwise 0 on failure * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * plist is invalid. **/ @@ -9838,6 +10700,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pset_buffer sets type conversion and background buffers. status to TRUE or FALSE. * * Given a dataset transfer property list, H5Pset_buffer sets the maximum size for the type conversion @@ -9858,7 +10722,7 @@ public class H5 implements java.io.Serializable { * Size, in bytes, of the type conversion and background buffers. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * plist is invalid. 
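A rough sketch of a dataset transfer property list (DXPL) using the error-detection and hyperslab vector routines above; the H5Z_ENABLE_EDC constant name is an assumption, and the class name is hypothetical.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class DxplSketch {
    public static long tunedDxpl() throws Exception {
        long dxpl = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
        // Verify checksums while reading filtered data.
        H5.H5Pset_edc_check(dxpl, HDF5Constants.H5Z_ENABLE_EDC);
        // Accumulate more offset/length pairs before issuing lower-level I/O.
        H5.H5Pset_hyper_vector_size(dxpl, 2048);
        return dxpl;
    }
}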
**/ @@ -9866,6 +10730,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_edc_check gets the error-detecting algorithm in use. * * @param plist @@ -9874,11 +10740,13 @@ public class H5 implements java.io.Serializable { * @return the error-detecting algorithm * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pget_edc_check(long plist) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_edc_check sets the error-detecting algorithm. * * @param plist @@ -9889,11 +10757,13 @@ public class H5 implements java.io.Serializable { * @return non-negative if succeed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_edc_check(long plist, int check) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_btree_ratio Get the B-tree split ratios for a dataset transfer property list. * * @param plist_id @@ -9908,7 +10778,7 @@ public class H5 implements java.io.Serializable { * @return non-negative if succeed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an input array is null. **/ @@ -9917,6 +10787,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_btree_ratio Sets B-tree split ratios for a dataset transfer property list. The split ratios * determine what percent of children go in the first node when a node splits. * @@ -9932,12 +10804,14 @@ public class H5 implements java.io.Serializable { * @return non-negative if succeed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Pset_btree_ratios(long plist_id, double left, double middle, double right) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_hyper_vector_size reads values previously set with H5Pset_hyper_vector_size. * * @param dxpl_id @@ -9948,13 +10822,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pget_hyper_vector_size(long dxpl_id, long[] vector_size) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_hyper_vector_size sets the number of * "I/O vectors" (offset and length pairs) which are to be * accumulated in memory before being issued to the lower levels @@ -9974,7 +10850,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_hyper_vector_size(long dxpl_id, long vector_size) @@ -9983,6 +10859,8 @@ public class H5 implements java.io.Serializable { // /////// Link creation property list (LCPL) routines /////// /** + * @ingroup JH5P + * * H5Pget_create_intermediate_group determines whether property is set to enable creating missing * intermediate groups. 
* @@ -9992,13 +10870,15 @@ public class H5 implements java.io.Serializable { * @return Boolean true or false * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native boolean H5Pget_create_intermediate_group(long lcpl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_create_intermediate_group specifies in property list whether to create missing intermediate * groups * @@ -10010,7 +10890,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative valule if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_create_intermediate_group(long lcpl_id, @@ -10020,6 +10900,8 @@ public class H5 implements java.io.Serializable { // /////// Group creation property list (GCPL) routines /////// /** + * @ingroup JH5P + * * H5Pget_local_heap_size_hint Retrieves the anticipated size of the local heap for original-style groups. * * @param gcpl_id @@ -10028,13 +10910,15 @@ public class H5 implements java.io.Serializable { * @return size_hint, the anticipated size of local heap * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native long H5Pget_local_heap_size_hint(long gcpl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_local_heap_size_hint Specifies the anticipated maximum size of a local heap. * * @param gcpl_id @@ -10045,13 +10929,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_local_heap_size_hint(long gcpl_id, long size_hint) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_link_phase_change Queries the settings for conversion between compact and dense groups. * * @param gcpl_id @@ -10068,7 +10954,7 @@ public class H5 implements java.io.Serializable { * @return Returns a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * size is null. * @@ -10077,6 +10963,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_link_phase_change Sets the parameters for conversion between compact and dense groups. * * @param gcpl_id @@ -10089,7 +10977,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Invalid values of max_compact and min_dense. * @@ -10099,6 +10987,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_est_link_info Queries data required to estimate required local heap or object header size. * * @param gcpl_id @@ -10115,7 +11005,7 @@ public class H5 implements java.io.Serializable { * @return Returns a non-negative value if successful; otherwise returns a negative value. 
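An illustrative sketch of the group creation property list (GCPL) routines above; the creation-order constants, the H5Gcreate call, and the group path are assumptions from the wider hdf.hdf5lib API rather than from this patch.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class GcplSketch {
    public static long orderedGroup(long file_id) throws Exception {
        long gcpl = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
        // Track and index links by creation order.
        H5.H5Pset_link_creation_order(gcpl, HDF5Constants.H5P_CRT_ORDER_TRACKED |
                                            HDF5Constants.H5P_CRT_ORDER_INDEXED);
        // Hint: roughly 16 links with names around 32 characters each.
        H5.H5Pset_est_link_info(gcpl, 16, 32);
        long group = H5.H5Gcreate(file_id, "/ordered", HDF5Constants.H5P_DEFAULT,
                                  gcpl, HDF5Constants.H5P_DEFAULT);
        H5.H5Pclose(gcpl);
        return group;
    }
}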
* * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * link_info is null. * @@ -10124,6 +11014,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_est_link_info Sets estimated number of links and length of link names in a group. * * @param gcpl_id @@ -10136,7 +11028,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Invalid values to est_num_entries and est_name_len. * @@ -10146,6 +11038,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_link_creation_order queries the group creation property list, gcpl_id, and returns a flag * indicating whether link creation order is tracked and/or indexed in a group. * @@ -10155,13 +11049,15 @@ public class H5 implements java.io.Serializable { * @return crt_order_flags -Creation order flag(s) * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pget_link_creation_order(long gcpl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_link_creation_order Sets flags in a group creation property list, gcpl_id, for tracking and/or * indexing links on creation order. * @@ -10174,7 +11070,7 @@ public class H5 implements java.io.Serializable { * @return Returns a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_link_creation_order(long gcpl_id, int crt_order_flags) @@ -10183,6 +11079,8 @@ public class H5 implements java.io.Serializable { // /////// String creation property list (STRCPL) routines /////// /** + * @ingroup JH5P + * * H5Pget_char_encoding gets the character encoding of the string. * * @param plist_id @@ -10191,12 +11089,14 @@ public class H5 implements java.io.Serializable { * @return Returns the character encoding of the string. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pget_char_encoding(long plist_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_char_encoding sets the character encoding of the string. * * @param plist_id @@ -10205,7 +11105,7 @@ public class H5 implements java.io.Serializable { * IN: the character encoding of the string * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native void H5Pset_char_encoding(long plist_id, int encoding) @@ -10214,6 +11114,8 @@ public class H5 implements java.io.Serializable { // /////// Link access property list (LAPL) routines /////// /** + * @ingroup JH5P + * * H5Pget_nlinks retrieves the maximum number of soft or user-defined link traversals allowed, nlinks, * before the library assumes it has found a cycle and aborts the traversal. This value is retrieved from * the link access property list lapl_id. 
@@ -10224,12 +11126,14 @@ public class H5 implements java.io.Serializable { * @return Returns a Maximum number of links to traverse. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native long H5Pget_nlinks(long lapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_nlinks sets the maximum number of soft or user-defined link traversals allowed, nlinks, before * the library assumes it has found a cycle and aborts the traversal. This value is set in the link access * property list lapl_id. @@ -10242,7 +11146,7 @@ public class H5 implements java.io.Serializable { * @return Returns a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Argument is Illegal * @@ -10251,6 +11155,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, IllegalArgumentException; /** + * @ingroup JH5P + * * H5Pget_elink_prefix Retrieves prefix applied to external link paths. * * @param lapl_id @@ -10262,7 +11168,7 @@ public class H5 implements java.io.Serializable { * the NULL terminator; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * prefix is null. * @@ -10271,6 +11177,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_elink_prefix Sets prefix to be applied to external link paths. * * @param lapl_id @@ -10281,7 +11189,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * prefix is null. * @@ -10290,6 +11198,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pget_elink_fapl Retrieves the file access property list identifier associated with the link access * property list. * @@ -10299,7 +11209,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public static long H5Pget_elink_fapl(long lapl_id) throws HDF5LibraryException @@ -10316,6 +11226,8 @@ public class H5 implements java.io.Serializable { private synchronized static native long _H5Pget_elink_fapl(long lapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_elink_fapl sets a file access property list for use in accessing a file pointed to by an * external link. * @@ -10327,13 +11239,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_elink_fapl(long lapl_id, long fapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_elink_acc_flags retrieves the external link traversal file access flag from the specified link * access property list. 
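A brief sketch of the link access property list (LAPL) routines above; the traversal limit and prefix are example values, and the class name is hypothetical.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class LaplSketch {
    public static long externalLinkLapl() throws Exception {
        long lapl = H5.H5Pcreate(HDF5Constants.H5P_LINK_ACCESS);
        // Abort traversal after 8 soft/user-defined link hops instead of the default.
        H5.H5Pset_nlinks(lapl, 8);
        // Resolve external link targets relative to this directory.
        H5.H5Pset_elink_prefix(lapl, "/data/external");
        return lapl;
    }
}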
* @@ -10343,12 +11257,14 @@ public class H5 implements java.io.Serializable { * @return File access flag for link traversal. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pget_elink_acc_flags(long lapl_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_elink_acc_flags Sets the external link traversal file access flag in a link access property * list. * @@ -10360,7 +11276,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception IllegalArgumentException * Invalid Flag values. * @@ -10371,6 +11287,8 @@ public class H5 implements java.io.Serializable { // /////// Object copy property list (OCPYPL) routines /////// /** + * @ingroup JH5P + * * H5Pget_copy_object retrieves the properties to be used when an object is copied. * * @param ocp_plist_id @@ -10379,12 +11297,14 @@ public class H5 implements java.io.Serializable { * @return Copy option(s) set in the object copy property list * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pget_copy_object(long ocp_plist_id) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_copy_object Sets properties to be used when an object is copied. * * @param ocp_plist_id @@ -10393,7 +11313,7 @@ public class H5 implements java.io.Serializable { * IN: Copy option(s) to be set * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native void H5Pset_copy_object(long ocp_plist_id, int copy_options) @@ -10402,6 +11322,8 @@ public class H5 implements java.io.Serializable { // /////// file drivers property list routines /////// /** + * @ingroup JH5P + * * H5Pget_fapl_core retrieve H5FD_CORE I/O settings. * * @param fapl_id @@ -10412,7 +11334,7 @@ public class H5 implements java.io.Serializable { * OUT: write to file name on flush setting * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native void H5Pget_fapl_core(long fapl_id, long[] increment, @@ -10420,6 +11342,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_fapl_core modifies the file access property list to use the H5FD_CORE driver. * * @param fapl_id @@ -10432,7 +11356,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_fapl_core(long fapl_id, long increment, @@ -10440,6 +11364,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pget_fapl_direct queries properties set by the H5Pset_fapl_direct. * * @param fapl_id @@ -10452,13 +11378,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
* **/ public synchronized static native int H5Pget_fapl_direct(long fapl_id, long[] info) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pset_fapl_direct Sets up use of the direct I/O driver. * * @param fapl_id @@ -10473,13 +11401,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_fapl_direct(long fapl_id, long alignment, long block_size, long cbuf_size) throws HDF5LibraryException; /** + * @ingroup JH5P + * * H5Pget_fapl_family Returns information about the family file access property list. * * @param fapl_id @@ -10492,7 +11422,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pget_fapl_family(long fapl_id, long[] memb_size, @@ -10500,6 +11430,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_fapl_family Sets up use of the direct I/O driver. * * @param fapl_id @@ -10512,13 +11444,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_fapl_family(long fapl_id, long memb_size, long memb_fapl_id) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_fapl_hdfs Modify the file access property list to use the H5FD_HDFS driver. * * @param fapl_id @@ -10529,13 +11463,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_fapl_hdfs(long fapl_id, H5FD_hdfs_fapl_t fapl_conf) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pget_fapl_hdfs gets the properties hdfs I/O driver. * * @param fapl_id @@ -10544,13 +11480,15 @@ public class H5 implements java.io.Serializable { * @return the properties of the hdfs driver. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native H5FD_hdfs_fapl_t H5Pget_fapl_hdfs(long fapl_id) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pget_fapl_multi Sets up use of the multi I/O driver. * * @param fapl_id @@ -10568,7 +11506,7 @@ public class H5 implements java.io.Serializable { * @return a boolean value; Allows read-only access to incomplete file sets when TRUE. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an array is null. * @@ -10579,6 +11517,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_fapl_multi Sets up use of the multi I/O driver. 
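A minimal sketch of the in-memory (H5FD_CORE) driver routine above; the H5Fcreate call and the H5F_ACC_TRUNC constant are assumptions from the wider hdf.hdf5lib API, and the class name is hypothetical.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class CoreDriverSketch {
    public static long inMemoryFile(String name) throws Exception {
        long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
        // Grow the in-memory image in 1 MiB increments; write it to disk on close.
        H5.H5Pset_fapl_core(fapl, 1024 * 1024, true);
        long file = H5.H5Fcreate(name, HDF5Constants.H5F_ACC_TRUNC,
                                 HDF5Constants.H5P_DEFAULT, fapl);
        H5.H5Pclose(fapl);
        return file;
    }
}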
* * @param fapl_id @@ -10596,7 +11536,7 @@ public class H5 implements java.io.Serializable { * IN: Allows read-only access to incomplete file sets when TRUE. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an array is null. * @@ -10607,6 +11547,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_fapl_log Sets up the logging virtual file driver (H5FD_LOG) for use. H5Pset_fapl_log modifies * the file access property list to use the logging driver, H5FD_LOG. The logging virtual file driver * (VFD) is a clone of the standard SEC2 (H5FD_SEC2) driver with additional facilities for logging VFD @@ -10622,7 +11564,7 @@ public class H5 implements java.io.Serializable { * IN: The size of the logging buffers, in bytes. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * logfile is null. **/ @@ -10631,6 +11573,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_fapl_sec2 Sets up use of the sec2 I/O driver. * * @param fapl_id @@ -10639,13 +11583,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_fapl_sec2(long fapl_id) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_fapl_split Sets up use of the split I/O driver. Makes the multi driver act like the * old split driver which stored meta data in one file and raw * data in another file @@ -10662,7 +11608,7 @@ public class H5 implements java.io.Serializable { * IN: File access property list identifier raw data * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native void @@ -10670,6 +11616,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_fapl_stdio Sets up use of the stdio I/O driver. * * @param fapl_id @@ -10678,13 +11626,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_fapl_stdio(long fapl_id) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_fapl_windows Sets up use of the windows I/O driver. * * @param fapl_id @@ -10693,13 +11643,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_fapl_windows(long fapl_id) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pset_fapl_ros3 Modify the file access property list to use the H5FD_ROS3 driver. * * @param fapl_id @@ -10710,13 +11662,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful; otherwise returns a negative value. 
* * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native int H5Pset_fapl_ros3(long fapl_id, H5FD_ros3_fapl_t fapl_conf) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5P + * * H5Pget_fapl_ros3 gets the properties of the ros3 I/O driver. * * @param fapl_id @@ -10725,7 +11679,7 @@ public class H5 implements java.io.Serializable { * @return the properties of the ros3 driver. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * **/ public synchronized static native H5FD_ros3_fapl_t H5Pget_fapl_ros3(long fapl_id) @@ -10813,6 +11767,12 @@ public class H5 implements java.io.Serializable { // // // //////////////////////////////////////////////////////////// /** + * @defgroup JH5PL Java Plugin (H5PL) Interface + **/ + + /** + * @ingroup JH5PL + * * H5PLset_loading_state uses one argument to enable or disable individual plugins. * The plugin_flags parameter is an encoded integer in which each bit controls a specific * plugin or class of plugins. @@ -10834,12 +11794,14 @@ public class H5 implements java.io.Serializable { * * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5PLset_loading_state(int plugin_flags) throws HDF5LibraryException; /** + * @ingroup JH5PL + * * H5PLget_loading_state retrieves the state of the dynamic plugins flag, plugin_flags.. * * @return the list of dynamic plugin types that are enabled or disabled. @@ -10849,33 +11811,39 @@ public class H5 implements java.io.Serializable { * If the value of plugin_flags is 0 (zero), all dynamic plugins are disabled. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5PLget_loading_state() throws HDF5LibraryException; /** + * @ingroup JH5PL + * * H5PLappend inserts the plugin path at the end of the table. * * @param plugin_path * IN: Path for location of filter plugin libraries. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5PLappend(String plugin_path) throws HDF5LibraryException; /** + * @ingroup JH5PL + * * H5PLprepend inserts the plugin path at the beginning of the table. * * @param plugin_path * IN: Path for location of filter plugin libraries. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5PLprepend(String plugin_path) throws HDF5LibraryException; /** + * @ingroup JH5PL + * * H5PLreplace replaces the plugin path at the specified index. * * @param plugin_path @@ -10884,12 +11852,14 @@ public class H5 implements java.io.Serializable { * IN: The table index (0-based). * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5PLreplace(String plugin_path, int index) throws HDF5LibraryException; /** + * @ingroup JH5PL + * * H5PLinsert inserts the plugin path at the specified index. * * @param plugin_path @@ -10898,23 +11868,27 @@ public class H5 implements java.io.Serializable { * IN: The table index (0-based). * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
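A short sketch of the plugin path (JH5PL) routines above; the directory argument is an example, and the class name is hypothetical.

import hdf.hdf5lib.H5;

public class PluginPathSketch {
    public static void addPluginDir(String dir) throws Exception {
        // Search a project-local directory before the default plugin paths.
        H5.H5PLprepend(dir);
        // Report the encoded flags of currently enabled plugin classes
        // (0 means all dynamic plugins are disabled).
        System.out.println("plugin flags: " + H5.H5PLget_loading_state());
    }
}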
**/ public synchronized static native void H5PLinsert(String plugin_path, int index) throws HDF5LibraryException; /** + * @ingroup JH5PL + * * H5PLremove removes the plugin path at the specified index. * * @param index * IN: The table index (0-based). * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5PLremove(int index) throws HDF5LibraryException; /** + * @ingroup JH5PL + * * H5PLget retrieves the plugin path at the specified index. * * @param index @@ -10923,29 +11897,254 @@ public class H5 implements java.io.Serializable { * @return the current path at the index in plugin path table * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native String H5PLget(int index) throws HDF5LibraryException; /** + * @ingroup JH5PL + * * H5PLsize retrieves the size of the current list of plugin paths. * * @return the current number of paths in the plugin path table * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5PLsize() throws HDF5LibraryException; // //////////////////////////////////////////////////////////// // // - // H5R: HDF5 1.12 Reference API Functions // + // H5R: HDF5 1.8 Reference API Functions // // // // //////////////////////////////////////////////////////////// - // Constructors // + /** + * @defgroup JH5R Java Reference (H5R) Interface + **/ + + private synchronized static native int H5Rcreate(byte[] ref, long loc_id, String name, int ref_type, + long space_id) + throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + /** + * @ingroup JH5R + * + * H5Rcreate creates the reference, ref, of the type specified in ref_type, pointing to the object name + * located at loc_id. + * + * @param loc_id + * IN: Location identifier used to locate the object being pointed to. + * @param name + * IN: Name of object at location loc_id. + * @param ref_type + * IN: Type of reference. + * @param space_id + * IN: Dataspace identifier with selection. + * + * @return the reference (byte[]) if successful + * + * @exception HDF5LibraryException + * Error from the HDF5 Library. + * @exception NullPointerException + * an input array is null. + * @exception IllegalArgumentException + * an input array is invalid. + **/ + public synchronized static byte[] H5Rcreate(long loc_id, String name, int ref_type, long space_id) + throws HDF5LibraryException, NullPointerException, IllegalArgumentException + { + /* These sizes are correct for HDF5.1.2 */ + int ref_size = 8; + if (ref_type == HDF5Constants.H5R_DATASET_REGION) { + ref_size = 12; + } + byte rbuf[] = new byte[ref_size]; + + /* will raise an exception if fails */ + H5Rcreate(rbuf, loc_id, name, ref_type, space_id); + + return rbuf; + } + + /** + * @ingroup JH5R + * + * Given a reference to some object, H5Rdereference opens that object and return an identifier. + * + * @param dataset + * IN: Dataset containing reference object. + * @param access_list + * IN: Property list of the object being referenced. + * @param ref_type + * IN: The reference type of ref. + * @param ref + * IN: reference to an object + * + * @return valid identifier if successful + * + * @exception HDF5LibraryException + * Error from the HDF5 Library. + * @exception NullPointerException + * output array is null. + * @exception IllegalArgumentException + * output array is invalid. 
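A sketch of the 1.8-style round trip these wrappers describe: build an object reference to a named dataset, then reopen and identify it. The path and the open file identifier fid are assumptions, and -1 is passed for the dataspace because object references carry no selection:

    // Sketch: 1.8-style object reference round trip.
    // Assumes: import hdf.hdf5lib.H5; import hdf.hdf5lib.HDF5Constants; fid is an open file identifier.
    static void objectReferenceRoundTrip(long fid) throws Exception {
        byte[] ref = H5.H5Rcreate(fid, "/group/dataset", HDF5Constants.H5R_OBJECT, -1);
        long obj   = H5.H5Rdereference(fid, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5R_OBJECT, ref);
        try {
            System.out.println("points at: " + H5.H5Rget_name_string(fid, HDF5Constants.H5R_OBJECT, ref));
        }
        finally {
            H5.H5Dclose(obj); // the referenced object is assumed to be a dataset here
        }
    }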
+ **/ + public static long H5Rdereference(long dataset, long access_list, int ref_type, byte[] ref) + throws HDF5LibraryException, NullPointerException, IllegalArgumentException + { + long id = _H5Rdereference(dataset, access_list, ref_type, ref); + if (id > 0) { + log.trace("OPEN_IDS: H5Rdereference add {}", id); + OPEN_IDS.add(id); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + return id; + } + + private synchronized static native long _H5Rdereference(long dataset, long access_list, int ref_type, + byte[] ref) + throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + /** + * @ingroup JH5R + * + * H5Rget_name retrieves a name for the object identified by ref. + * + * @param loc_id + * IN: Identifier for the dataset containing the reference or for the group that dataset is in. + * @param ref_type + * IN: Type of reference. + * @param ref + * IN: An object or dataset region reference. + * @param name + * OUT: A name associated with the referenced object or dataset region. + * @param size + * IN: The size of the name buffer. + * + * @return Returns the length of the name if successful, returning 0 (zero) if no name is associated with + * the identifier. Otherwise returns a negative value. + * + * + * @exception HDF5LibraryException + * Error from the HDF5 Library. + * @exception NullPointerException + * size is null. + * @exception IllegalArgumentException + * Argument is illegal. + * + **/ + public synchronized static native long H5Rget_name(long loc_id, int ref_type, byte[] ref, String[] name, + long size) + throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + /** + * @ingroup JH5R + * + * H5Rget_name_string retrieves a name for the object identified by ref. + * + * @param loc_id + * IN: Identifier for the dataset containing the reference or for the group that dataset is in. + * @param ref_type + * IN: Type of reference. + * @param ref + * IN: An object or dataset region reference. + * + * @return Returns the name if successful, returning null if no name is associated with + * the identifier. + * + * @exception HDF5LibraryException + * Error from the HDF5 Library. + * @exception NullPointerException + * size is null. + * @exception IllegalArgumentException + * Argument is illegal. + **/ + public synchronized static native String H5Rget_name_string(long loc_id, int ref_type, byte[] ref) + throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + /** + * @ingroup JH5R + * + * H5Rget_obj_type Given a reference to an object ref, H5Rget_obj_type returns the type of the object + * pointed to. + * + * @param loc_id + * IN: loc_id of the reference object. + * @param ref_type + * IN: Type of reference to query. + * @param ref + * IN: the reference + * + * @return Returns the object type + * + * @exception HDF5LibraryException + * Error from the HDF5 Library. + * @exception NullPointerException + * array is null. + * @exception IllegalArgumentException + * array is invalid. + **/ + public synchronized static native int H5Rget_obj_type(long loc_id, int ref_type, byte ref[]) + throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + /** + * @ingroup JH5R + * + * H5Rget_obj_type2 Retrieves the type of object that an object reference points to. 
+ * + * @see public static int H5Rget_obj_type(int loc_id, int ref_type, byte ref[]) + **/ + private synchronized static native int H5Rget_obj_type2(long loc_id, int ref_type, byte ref[], + int[] obj_type) + throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + /** + * @ingroup JH5R + * + * Given a reference to an object ref, H5Rget_region creates a copy of the dataspace of the dataset + * pointed to and defines a selection in the copy which is the region pointed to. + * + * @param loc_id + * IN: loc_id of the reference object. + * @param ref_type + * IN: The reference type of ref. + * @param ref + * OUT: the reference to the object and region + * + * @return a valid identifier if successful + * + * @exception HDF5LibraryException + * Error from the HDF5 Library. + * @exception NullPointerException + * output array is null. + * @exception IllegalArgumentException + * output array is invalid. + **/ + public static long H5Rget_region(long loc_id, int ref_type, byte[] ref) + throws HDF5LibraryException, NullPointerException, IllegalArgumentException + { + long id = _H5Rget_region(loc_id, ref_type, ref); + if (id > 0) { + log.trace("OPEN_IDS: H5Rget_region add {}", id); + OPEN_IDS.add(id); + log.trace("OPEN_IDS: {}", OPEN_IDS.size()); + } + return id; + } + + private synchronized static native long _H5Rget_region(long loc_id, int ref_type, byte[] ref) + throws HDF5LibraryException, NullPointerException, IllegalArgumentException; + + // //////////////////////////////////////////////////////////// + // // + // H5R: HDF5 1.12 Reference API Functions // + // // + // //////////////////////////////////////////////////////////// /** + * @ingroup JH5R + * * H5Rcreate_object creates a reference pointing to the object named name located at loc id. * * @param loc_id @@ -10958,7 +12157,7 @@ public class H5 implements java.io.Serializable { * @return the reference (byte[]) if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an input array is null. * @exception IllegalArgumentException @@ -10968,6 +12167,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5R + * * H5Rcreate_region creates the reference, pointing to the region represented by * space id within the object named name located at loc id. * @@ -10983,7 +12184,7 @@ public class H5 implements java.io.Serializable { * @return the reference (byte[]) if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an input array is null. * @exception IllegalArgumentException @@ -10994,6 +12195,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5R + * * H5Rcreate_attr creates the reference, pointing to the attribute named attr name * and attached to the object named name located at loc id. * @@ -11009,7 +12212,7 @@ public class H5 implements java.io.Serializable { * @return the reference (byte[]) if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an input array is null. 
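For dataset region references, H5Rget_region (above) returns a dataspace that carries the referenced selection. A sketch under the assumption that region_ref was created earlier with H5Rcreate and H5R_DATASET_REGION:

    // Sketch: inspect the selection carried by a 1.8-style region reference.
    // Assumes: import hdf.hdf5lib.H5; import hdf.hdf5lib.HDF5Constants; fid is an open file identifier.
    static long countReferencedPoints(long fid, byte[] region_ref) throws Exception {
        long sid = H5.H5Rget_region(fid, HDF5Constants.H5R_DATASET_REGION, region_ref);
        try {
            return H5.H5Sget_select_npoints(sid); // element count of the referenced region
        }
        finally {
            H5.H5Sclose(sid);
        }
    }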
* @exception IllegalArgumentException @@ -11020,13 +12223,15 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5R + * * H5Rdestroy destroys a reference and releases resources. * * @param ref_ptr * IN: Reference to an object, region or attribute attached to an object. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an input array is null. * @exception IllegalArgumentException @@ -11036,6 +12241,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5R + * * H5Rget_type retrieves the type of a reference. * * @param ref_ptr @@ -11044,7 +12251,7 @@ public class H5 implements java.io.Serializable { * @return a valid reference type if successful; otherwise returns H5R UNKNOWN. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an input array is null. * @exception IllegalArgumentException @@ -11054,6 +12261,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5R + * * H5Requal determines whether two references point to the same object, region or attribute. * * @param ref1_ptr @@ -11064,7 +12273,7 @@ public class H5 implements java.io.Serializable { * @return true if equal, else false * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an input array is null. * @exception IllegalArgumentException @@ -11074,6 +12283,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5R + * * H5Rcopy creates a copy of an existing reference. * * @param src_ref_ptr @@ -11082,7 +12293,7 @@ public class H5 implements java.io.Serializable { * @return a valid copy of the reference (byte[]) if successful. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an input array is null. * @exception IllegalArgumentException @@ -11092,6 +12303,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5R + * * H5Ropen_object opens that object and returns an identifier. * The object opened with this function should be closed when it is no longer needed * so that resource leaks will not develop. Use the appropriate close function such @@ -11111,7 +12324,7 @@ public class H5 implements java.io.Serializable { * @return a valid identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an input array is null. * @exception IllegalArgumentException @@ -11133,6 +12346,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5R + * * H5Ropen region creates a copy of the dataspace of the dataset pointed to by a region reference, * ref ptr, and defines a selection matching the selection pointed to by ref ptr within the dataspace * copy. 
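Unlike the 1.8 references, the byte-array references handled by H5Rdestroy, H5Requal, and H5Rcopy above own library resources and must be released explicitly. A minimal sketch, assuming ref is such a reference obtained elsewhere:

    // Sketch: duplicate, compare, and release a 1.12-style reference.
    // Assumes: import hdf.hdf5lib.H5;
    static void copyAndRelease(byte[] ref) throws Exception {
        byte[] dup = H5.H5Rcopy(ref);                  // independent copy of the reference
        try {
            System.out.println("copies equal: " + H5.H5Requal(ref, dup));
        }
        finally {
            H5.H5Rdestroy(dup);                        // each copy must be destroyed separately
        }
    }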
Use H5Sclose to release the dataspace identifier returned by this function when the identifier is @@ -11152,7 +12367,7 @@ public class H5 implements java.io.Serializable { * @return a valid dataspace identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an input array is null. * @exception IllegalArgumentException @@ -11174,6 +12389,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5R + * * H5Ropen_attr opens the attribute attached to the object and returns an identifier. * The attribute opened with this function should be closed with H5Aclose when it is no longer needed * so that resource leaks will not develop. @@ -11192,7 +12409,7 @@ public class H5 implements java.io.Serializable { * @return a valid attribute identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an input array is null. * @exception IllegalArgumentException @@ -11213,9 +12430,9 @@ public class H5 implements java.io.Serializable { private synchronized static native long _H5Ropen_attr(byte[] ref_ptr, long rapl_id, long aapl_id) throws HDF5LibraryException, NullPointerException, IllegalArgumentException; - // Get type // - /** + * @ingroup JH5R + * * H5Rget obj type3 retrieves the type of the referenced object pointed to. * * @param ref_ptr @@ -11228,7 +12445,7 @@ public class H5 implements java.io.Serializable { * @return Returns the object type * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * array is null. * @exception IllegalArgumentException @@ -11238,6 +12455,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5R + * * H5Rget_file_name retrieves the file name for the object, region or attribute reference pointed to. * * @param ref_ptr @@ -11246,7 +12465,7 @@ public class H5 implements java.io.Serializable { * @return Returns the file name of the reference * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * array is null. * @exception IllegalArgumentException @@ -11256,6 +12475,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5R + * * H5Rget_obj_name retrieves the object name for the object, region or attribute reference pointed to. * * @param ref_ptr @@ -11268,7 +12489,7 @@ public class H5 implements java.io.Serializable { * @return Returns the object name of the reference * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * array is null. * @exception IllegalArgumentException @@ -11278,6 +12499,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5R + * * H5Rget_attr_name retrieves the attribute name for the object, region or attribute reference pointed to. 
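The name queries in this part of the hunk let an application report where a reference points without opening the target; the exact parameter lists sketched here are assumptions based on the surrounding documentation:

    // Sketch: report the file and object behind a reference.
    // Assumes: import hdf.hdf5lib.H5; import hdf.hdf5lib.HDF5Constants;
    static void describeReference(byte[] ref) throws Exception {
        System.out.println("file:   " + H5.H5Rget_file_name(ref));
        System.out.println("object: " + H5.H5Rget_obj_name(ref, HDF5Constants.H5P_DEFAULT));
    }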
* * @param ref_ptr @@ -11286,7 +12509,7 @@ public class H5 implements java.io.Serializable { * @return Returns the attribute name of the reference * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * array is null. * @exception IllegalArgumentException @@ -11297,218 +12520,18 @@ public class H5 implements java.io.Serializable { // //////////////////////////////////////////////////////////// // // - // H5R: HDF5 1.8 Reference API Functions // + // H5S: Dataspace Interface Functions // // // // //////////////////////////////////////////////////////////// - - private synchronized static native int H5Rcreate(byte[] ref, long loc_id, String name, int ref_type, - long space_id) - throws HDF5LibraryException, NullPointerException, IllegalArgumentException; - - /** - * H5Rcreate creates the reference, ref, of the type specified in ref_type, pointing to the object name - * located at loc_id. - * - * @param loc_id - * IN: Location identifier used to locate the object being pointed to. - * @param name - * IN: Name of object at location loc_id. - * @param ref_type - * IN: Type of reference. - * @param space_id - * IN: Dataspace identifier with selection. - * - * @return the reference (byte[]) if successful - * - * @exception HDF5LibraryException - * Error from the HDF-5 Library. - * @exception NullPointerException - * an input array is null. - * @exception IllegalArgumentException - * an input array is invalid. - **/ - public synchronized static byte[] H5Rcreate(long loc_id, String name, int ref_type, long space_id) - throws HDF5LibraryException, NullPointerException, IllegalArgumentException - { - /* These sizes are correct for HDF5.1.2 */ - int ref_size = 8; - if (ref_type == HDF5Constants.H5R_DATASET_REGION) { - ref_size = 12; - } - byte rbuf[] = new byte[ref_size]; - - /* will raise an exception if fails */ - H5Rcreate(rbuf, loc_id, name, ref_type, space_id); - - return rbuf; - } - - /** - * Given a reference to some object, H5Rdereference opens that object and return an identifier. - * - * @param dataset - * IN: Dataset containing reference object. - * @param access_list - * IN: Property list of the object being referenced. - * @param ref_type - * IN: The reference type of ref. - * @param ref - * IN: reference to an object - * - * @return valid identifier if successful - * - * @exception HDF5LibraryException - * Error from the HDF-5 Library. - * @exception NullPointerException - * output array is null. - * @exception IllegalArgumentException - * output array is invalid. - **/ - public static long H5Rdereference(long dataset, long access_list, int ref_type, byte[] ref) - throws HDF5LibraryException, NullPointerException, IllegalArgumentException - { - long id = _H5Rdereference(dataset, access_list, ref_type, ref); - if (id > 0) { - log.trace("OPEN_IDS: H5Rdereference add {}", id); - OPEN_IDS.add(id); - log.trace("OPEN_IDS: {}", OPEN_IDS.size()); - } - return id; - } - - private synchronized static native long _H5Rdereference(long dataset, long access_list, int ref_type, - byte[] ref) - throws HDF5LibraryException, NullPointerException, IllegalArgumentException; - - /** - * H5Rget_name retrieves a name for the object identified by ref. - * - * @param loc_id - * IN: Identifier for the dataset containing the reference or for the group that dataset is in. - * @param ref_type - * IN: Type of reference. - * @param ref - * IN: An object or dataset region reference. 
- * @param name - * OUT: A name associated with the referenced object or dataset region. - * @param size - * IN: The size of the name buffer. - * - * @return Returns the length of the name if successful, returning 0 (zero) if no name is associated with - * the identifier. Otherwise returns a negative value. - * - * - * @exception HDF5LibraryException - * Error from the HDF-5 Library. - * @exception NullPointerException - * size is null. - * @exception IllegalArgumentException - * Argument is illegal. - * - **/ - public synchronized static native long H5Rget_name(long loc_id, int ref_type, byte[] ref, String[] name, - long size) - throws HDF5LibraryException, NullPointerException, IllegalArgumentException; - - /** - * H5Rget_name_string retrieves a name for the object identified by ref. - * - * @param loc_id - * IN: Identifier for the dataset containing the reference or for the group that dataset is in. - * @param ref_type - * IN: Type of reference. - * @param ref - * IN: An object or dataset region reference. - * - * @return Returns the name if successful, returning null if no name is associated with - * the identifier. - * - * @exception HDF5LibraryException - * Error from the HDF-5 Library. - * @exception NullPointerException - * size is null. - * @exception IllegalArgumentException - * Argument is illegal. - **/ - public synchronized static native String H5Rget_name_string(long loc_id, int ref_type, byte[] ref) - throws HDF5LibraryException, NullPointerException, IllegalArgumentException; - - /** - * H5Rget_obj_type Given a reference to an object ref, H5Rget_obj_type returns the type of the object - * pointed to. - * - * @param loc_id - * IN: loc_id of the reference object. - * @param ref_type - * IN: Type of reference to query. - * @param ref - * IN: the reference - * - * @return Returns the object type - * - * @exception HDF5LibraryException - * Error from the HDF-5 Library. - * @exception NullPointerException - * array is null. - * @exception IllegalArgumentException - * array is invalid. - **/ - public synchronized static native int H5Rget_obj_type(long loc_id, int ref_type, byte ref[]) - throws HDF5LibraryException, NullPointerException, IllegalArgumentException; - - /** - * H5Rget_obj_type2 Retrieves the type of object that an object reference points to. - * - * @see public static int H5Rget_obj_type(int loc_id, int ref_type, byte ref[]) - **/ - private synchronized static native int H5Rget_obj_type2(long loc_id, int ref_type, byte ref[], - int[] obj_type) - throws HDF5LibraryException, NullPointerException, IllegalArgumentException; - /** - * Given a reference to an object ref, H5Rget_region creates a copy of the dataspace of the dataset - * pointed to and defines a selection in the copy which is the region pointed to. - * - * @param loc_id - * IN: loc_id of the reference object. - * @param ref_type - * IN: The reference type of ref. - * @param ref - * OUT: the reference to the object and region - * - * @return a valid identifier if successful - * - * @exception HDF5LibraryException - * Error from the HDF-5 Library. - * @exception NullPointerException - * output array is null. - * @exception IllegalArgumentException - * output array is invalid. 
+ * @defgroup JH5S Java Dataspace (H5S) Interface **/ - public static long H5Rget_region(long loc_id, int ref_type, byte[] ref) - throws HDF5LibraryException, NullPointerException, IllegalArgumentException - { - long id = _H5Rget_region(loc_id, ref_type, ref); - if (id > 0) { - log.trace("OPEN_IDS: H5Rget_region add {}", id); - OPEN_IDS.add(id); - log.trace("OPEN_IDS: {}", OPEN_IDS.size()); - } - return id; - } - - private synchronized static native long _H5Rget_region(long loc_id, int ref_type, byte[] ref) - throws HDF5LibraryException, NullPointerException, IllegalArgumentException; - - // //////////////////////////////////////////////////////////// - // // - // H5S: Dataspace Interface Functions // - // // - // //////////////////////////////////////////////////////////// /**************** Operations on dataspaces ********************/ /** + * @ingroup JH5S + * * H5Screate creates a new dataspace of a particular type. * * @param type @@ -11517,7 +12540,7 @@ public class H5 implements java.io.Serializable { * @return a dataspace identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static long H5Screate(int type) throws HDF5LibraryException { @@ -11533,6 +12556,8 @@ public class H5 implements java.io.Serializable { private synchronized static native long _H5Screate(int type) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Screate_simple creates a new simple data space and opens it for access. * * @param rank @@ -11545,7 +12570,7 @@ public class H5 implements java.io.Serializable { * @return a dataspace identifier * * @exception HDF5Exception - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * dims or maxdims is null. **/ @@ -11565,6 +12590,8 @@ public class H5 implements java.io.Serializable { throws HDF5Exception, NullPointerException; /** + * @ingroup JH5S + * * H5Sset_extent_simple sets or resets the size of an existing dataspace. * * @param space_id @@ -11579,13 +12606,15 @@ public class H5 implements java.io.Serializable { * @return a dataspace identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Sset_extent_simple(long space_id, int rank, long[] current_size, long[] maximum_size) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5S + * * H5Sset_extent_simple sets or resets the size of an existing dataspace. * * @param space_id @@ -11600,7 +12629,7 @@ public class H5 implements java.io.Serializable { * @return a dataspace identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static long H5Sset_extent_simple(long space_id, int rank, byte[] current_size, byte[] maximum_size) @@ -11615,6 +12644,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5S + * * H5Scopy creates a new dataspace which is an exact copy of the dataspace identified by space_id. * * @param space_id @@ -11622,7 +12653,7 @@ public class H5 implements java.io.Serializable { * @return a dataspace identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
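A short sketch of the basic dataspace lifecycle covered by H5Screate_simple, H5Scopy, and H5Sclose; the 2-D extent is arbitrary:

    // Sketch: create a 2-D simple dataspace, copy it, and release both.
    // Assumes: import hdf.hdf5lib.H5; import hdf.hdf5lib.HDF5Constants;
    static void simpleDataspaceLifecycle() throws Exception {
        long[] dims    = {100, 200};
        long[] maxdims = {100, HDF5Constants.H5S_UNLIMITED}; // second dimension may grow later
        long sid  = H5.H5Screate_simple(2, dims, maxdims);
        long copy = H5.H5Scopy(sid);                          // exact copy of the extent
        H5.H5Sclose(copy);
        H5.H5Sclose(sid);
    }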
**/ public static long H5Scopy(long space_id) throws HDF5LibraryException { @@ -11638,6 +12669,8 @@ public class H5 implements java.io.Serializable { private synchronized static native long _H5Scopy(long space_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sclose releases a dataspace. * * @param space_id @@ -11646,7 +12679,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static int H5Sclose(long space_id) throws HDF5LibraryException { @@ -11662,6 +12695,8 @@ public class H5 implements java.io.Serializable { private synchronized static native int _H5Sclose(long space_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sencode converts a data space description into binary form in a buffer. * * @param obj_id @@ -11670,12 +12705,14 @@ public class H5 implements java.io.Serializable { * @return the buffer for the object to be encoded into. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native byte[] H5Sencode(long obj_id) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5S + * * H5Sdecode reconstructs the HDF5 data space object and returns a new object handle for it. * * @param buf @@ -11684,7 +12721,7 @@ public class H5 implements java.io.Serializable { * @return a new object handle * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * buf is null. **/ @@ -11692,19 +12729,24 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5S + * * H5Sget_simple_extent_npoints determines the number of elements in a dataspace. * * @param space_id * ID of the dataspace object to query + * * @return the number of elements in the dataspace if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Sget_simple_extent_npoints(long space_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sget_simple_extent_ndims determines the dimensionality (or rank) of a dataspace. * * @param space_id @@ -11713,12 +12755,14 @@ public class H5 implements java.io.Serializable { * @return the number of dimensions in the dataspace if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Sget_simple_extent_ndims(long space_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sget_simple_extent_dims returns the size and maximum sizes of each dimension of a dataspace through * the dims and maxdims parameters. * @@ -11732,7 +12776,7 @@ public class H5 implements java.io.Serializable { * @return the number of dimensions in the dataspace if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * dims or maxdims is null. **/ @@ -11741,6 +12785,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5S + * * H5Sis_simple determines whether a dataspace is a simple dataspace. 
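The extent queries combine naturally: ask for the rank first, then size the dims/maxdims buffers accordingly. A sketch assuming sid is an open dataspace identifier:

    // Sketch: read back the current and maximum extent of a dataspace.
    // Assumes: import hdf.hdf5lib.H5;
    static long[] currentExtent(long sid) throws Exception {
        int rank = H5.H5Sget_simple_extent_ndims(sid);
        long[] dims    = new long[rank];
        long[] maxdims = new long[rank];
        H5.H5Sget_simple_extent_dims(sid, dims, maxdims); // fills both buffers
        return dims;
    }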
* * @param space_id @@ -11749,11 +12795,13 @@ public class H5 implements java.io.Serializable { * @return true if is a simple dataspace * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native boolean H5Sis_simple(long space_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sget_simple_extent_type queries a dataspace to determine the current class of a dataspace. * * @param space_id @@ -11762,12 +12810,14 @@ public class H5 implements java.io.Serializable { * @return a dataspace class name if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Sget_simple_extent_type(long space_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sset_extent_none removes the extent from a dataspace and sets the type to H5S_NONE. * * @param space_id @@ -11776,11 +12826,13 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Sset_extent_none(long space_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sextent_copy copies the extent from source_space_id to dest_space_id. This action may change the type * of the dataspace. * @@ -11792,12 +12844,14 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Sextent_copy(long dest_space_id, long source_space_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sextent_equal determines whether the dataspace extents of two dataspaces, space1_id and space2_id, * are equal. * @@ -11809,7 +12863,7 @@ public class H5 implements java.io.Serializable { * @return true if successful, else false * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native boolean H5Sextent_equal(long first_space_id, long second_space_id) throws HDF5LibraryException; @@ -11817,6 +12871,8 @@ public class H5 implements java.io.Serializable { /***************** Operations on dataspace selections *****************/ /** + * @ingroup JH5S + * * H5Sget_select_type retrieves the type of selection currently defined for the dataspace space_id. * * @param space_id @@ -11825,11 +12881,13 @@ public class H5 implements java.io.Serializable { * @return the dataspace selection type if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Sget_select_type(long space_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sget_select_npoints determines the number of elements in the current selection of a dataspace. * * @param space_id @@ -11838,11 +12896,13 @@ public class H5 implements java.io.Serializable { * @return the number of elements in the selection if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
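H5Sencode and H5Sdecode, documented just above, provide a simple serialization path for dataspaces, and H5Sextent_equal can confirm the round trip. A sketch assuming sid is an open dataspace:

    // Sketch: serialize a dataspace and verify the decoded copy has the same extent.
    // Assumes: import hdf.hdf5lib.H5;
    static boolean encodeDecodeRoundTrip(long sid) throws Exception {
        byte[] blob   = H5.H5Sencode(sid);   // binary description of the dataspace
        long restored = H5.H5Sdecode(blob);
        try {
            return H5.H5Sextent_equal(sid, restored);
        }
        finally {
            H5.H5Sclose(restored);
        }
    }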
**/ public synchronized static native long H5Sget_select_npoints(long space_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sselect_copy copies all the selection information (including offset) from the source * dataspace to the destination dataspace. * @@ -11852,12 +12912,14 @@ public class H5 implements java.io.Serializable { * ID of the source dataspace * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Sselect_copy(long dst_id, long src_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sselect_valid verifies that the selection for the dataspace. * * @param space_id @@ -11866,11 +12928,13 @@ public class H5 implements java.io.Serializable { * @return true if the selection is contained within the extent and FALSE if it is not or is an error. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native boolean H5Sselect_valid(long space_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sselect_adjust moves a selection by subtracting an offset from it. * * @param space_id @@ -11879,7 +12943,7 @@ public class H5 implements java.io.Serializable { * Offset to subtract * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * offset is null. **/ @@ -11887,8 +12951,10 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5S + * * H5Sget_select_bounds retrieves the coordinates of the bounding box containing the current selection and - * places them into user-supplied buffers.
    The start and end buffers must be large enough to hold the + * places them into user-supplied buffers.
    The start and end buffers must be large enough to hold the * dataspace rank number of coordinates.
+ * Error from the HDF5 Library. **/ public synchronized static native int H5Sselect_none(long space_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sselect_elements selects array elements to be included in the selection for the space_id dataspace. * * @param space_id @@ -12039,13 +13120,15 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ private synchronized static native int H5Sselect_elements(long space_id, int op, int num_elements, byte[] coord) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5S + * * H5Sselect_elements selects array elements to be included in the selection for the space_id dataspace. * * @param space_id @@ -12062,7 +13145,7 @@ public class H5 implements java.io.Serializable { * @exception HDF5Exception * Error in the data conversion * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * cord array is **/ @@ -12084,6 +13167,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5S + * * H5Sget_select_elem_npoints returns the number of element points in the current dataspace selection. * * @param spaceid @@ -12092,12 +13177,14 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Sget_select_elem_npoints(long spaceid) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sget_select_elem_pointlist returns an array of of element points in the current dataspace selection. * The point coordinates have the same dimensionality (rank) as the dataspace they are located within, one * coordinate per point. @@ -12114,7 +13201,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * buf is null. **/ @@ -12123,6 +13210,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5S + * * H5Sselect_hyperslab selects a hyperslab region to add to the current selected region for the dataspace * specified by space_id. The start, stride, count, and block arrays must be the same size as the rank of * the dataspace. @@ -12143,7 +13232,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an input array is null. * @exception IllegalArgumentException @@ -12166,6 +13255,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5S + * * H5Sselect_hyperslab selects a hyperslab region to add to the current selected region for the dataspace * specified by space_id. The start, stride, count, and block arrays must be the same size as the rank of * the dataspace. @@ -12186,7 +13277,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an input array is null. 
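A sketch of the hyperslab machinery documented above: select a contiguous 10 x 10 block starting at (5, 5), then check what was selected. sid is assumed to be a 2-D simple dataspace large enough to hold the block:

    // Sketch: select a 10x10 hyperslab and inspect the selection.
    // Assumes: import hdf.hdf5lib.H5; import hdf.hdf5lib.HDF5Constants;
    static void selectBlock(long sid) throws Exception {
        long[] start  = {5, 5};
        long[] stride = {1, 1};
        long[] count  = {10, 10};
        long[] block  = {1, 1};
        H5.H5Sselect_hyperslab(sid, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);

        long npoints = H5.H5Sget_select_npoints(sid);   // 100 for this selection
        long[] lo = new long[2], hi = new long[2];
        H5.H5Sget_select_bounds(sid, lo, hi);           // bounding box: (5,5) .. (14,14)
        System.out.println(npoints + " points, bounds (" + lo[0] + "," + lo[1] + ")..(" + hi[0] + "," + hi[1] + ")");
    }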
* @exception IllegalArgumentException @@ -12197,6 +13288,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5S + * * H5Scombine_hyperslab combines a hyperslab selection with the current selection for a dataspace, * creating a new dataspace to return the generated selection. * If the current selection is not a hyperslab, it is freed and the hyperslab @@ -12220,7 +13313,7 @@ public class H5 implements java.io.Serializable { * @return a dataspace ID on success / H5I_INVALID_HID on failure * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an input array is null. * @exception IllegalArgumentException @@ -12231,8 +13324,10 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5S + * * H5Smodify_select refine an existing hyperslab selection with an operation, using a second - * hyperslab. The first selection is modified to contain the result of + * hyperslab. The first selection is modified to contain the result of * space1 operated on by space2. * * @param space1_id @@ -12243,12 +13338,14 @@ public class H5 implements java.io.Serializable { * ID of the source dataspace * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Smodify_select(long space1_id, int op, long space2_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Scombine_select combines two existing hyperslab selections with an operation, returning * a new dataspace with the resulting selection. The dataspace extent from * space1 is copied for the dataspace extent of the newly created dataspace. @@ -12263,12 +13360,14 @@ public class H5 implements java.io.Serializable { * @return a dataspace ID on success / H5I_INVALID_HID on failure * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Scombine_select(long space1_id, int op, long space2_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sis_regular_hyperslab retrieves a regular hyperslab selection for the dataspace specified * by space_id. * @@ -12278,12 +13377,14 @@ public class H5 implements java.io.Serializable { * @return a TRUE/FALSE for hyperslab selection if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native boolean H5Sis_regular_hyperslab(long space_id) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sget_regular_hyperslab determines if a hyperslab selection is regular for the dataspace specified * by space_id. The start, stride, count, and block arrays must be the same size as the rank of the * dataspace. @@ -12300,7 +13401,7 @@ public class H5 implements java.io.Serializable { * OUT: Size of block in hyperslab. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * an output array is null. 
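H5Scombine_select and H5Smodify_select operate on whole selections rather than raw hyperslab parameters. A hedged sketch that ORs two existing selections into a new dataspace (space1 and space2 are assumed to hold selections of the same rank):

    // Sketch: union of two selections, returned as a new dataspace.
    // Assumes: import hdf.hdf5lib.H5; import hdf.hdf5lib.HDF5Constants;
    static long unionOfSelections(long space1, long space2) throws Exception {
        long result = H5.H5Scombine_select(space1, HDF5Constants.H5S_SELECT_OR, space2);
        System.out.println("union holds " + H5.H5Sget_select_npoints(result) + " elements");
        return result; // caller releases with H5Sclose
    }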
* @exception IllegalArgumentException @@ -12311,6 +13412,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5S + * * H5Sget_select_hyper_nblocks returns the number of hyperslab blocks in the current dataspace selection. * * @param spaceid @@ -12319,12 +13422,14 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Sget_select_hyper_nblocks(long spaceid) throws HDF5LibraryException; /** + * @ingroup JH5S + * * H5Sget_select_hyper_blocklist returns an array of hyperslab blocks. The block coordinates have the same * dimensionality (rank) as the dataspace they are located within. The list of blocks is formatted as * follows: @@ -12350,7 +13455,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * buf is null. **/ @@ -12359,23 +13464,25 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5S + * * H5Sselect_project_intersection projects the intersection of the selections of src_space_id and * src_intersect_space_id within the selection of src_space_id as a * selection within the selection of dst_space_id. * * @param src_space_id - * Selection that is mapped to dst_space_id, and intersected with src_intersect_space_id + * Selection that is mapped to dst_space_id, and intersected with src_intersect_space_id * @param dst_space_id - * Selection that is mapped to src_space_id + * Selection that is mapped to src_space_id * @param src_intersect_space_id - * Selection whose intersection with src_space_id is projected to dst_space_id to obtain the - * result + * Selection whose intersection with src_space_id is projected to dst_space_id to obtain the + * result * * @return a dataspace with a selection equal to the intersection of * src_intersect_space_id and src_space_id projected from src_space to dst_space on success * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Sselect_project_intersection(long src_space_id, long dst_space_id, long src_intersect_space_id) @@ -12398,8 +13505,13 @@ public class H5 implements java.io.Serializable { // H5T: Datatype Interface Functions // // // // //////////////////////////////////////////////////////////// + /** + * @defgroup JH5T Java Datatype (H5T) Interface + **/ /** + * @ingroup JH5T + * * H5Tarray_create creates a new array datatype object. * * @param base_id @@ -12412,7 +13524,7 @@ public class H5 implements java.io.Serializable { * @return a valid datatype identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * dim is null. **/ @@ -12432,6 +13544,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5T + * * H5Tclose releases a datatype. * * @param type_id @@ -12440,7 +13554,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. 
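H5Tarray_create, introduced above, layers a fixed-size array datatype over an existing base type. A sketch building (and releasing) a 3 x 4 array of native doubles:

    // Sketch: build and release a 3x4 array datatype of doubles.
    // Assumes: import hdf.hdf5lib.H5; import hdf.hdf5lib.HDF5Constants;
    static void arrayTypeExample() throws Exception {
        long[] dims = {3, 4};
        long atype = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_DOUBLE, 2, dims);
        System.out.println("rank " + H5.H5Tget_array_ndims(atype)); // prints: rank 2
        H5.H5Tclose(atype);
    }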
+ * Error from the HDF5 Library. **/ public static int H5Tclose(long type_id) throws HDF5LibraryException { @@ -12456,6 +13570,8 @@ public class H5 implements java.io.Serializable { private synchronized static native int _H5Tclose(long type_id) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tcommit saves a transient datatype as an immutable named datatype in a file. * * @param loc_id @@ -12472,7 +13588,7 @@ public class H5 implements java.io.Serializable { * IN: Datatype access property list. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -12481,6 +13597,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5T + * * H5Tcommit_anon commits a transient datatype (not immutable) to a file, turning it into a named datatype * with the specified creation and property lists. * @@ -12494,12 +13612,14 @@ public class H5 implements java.io.Serializable { * IN: Datatype access property list. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Tcommit_anon(long loc_id, long type_id, long tcpl_id, long tapl_id) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tcommitted queries a type to determine whether the type specified by the type identifier is a named * type or a transient type. * @@ -12509,11 +13629,13 @@ public class H5 implements java.io.Serializable { * @return true the datatype has been committed * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native boolean H5Tcommitted(long type_id) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tcompiler_conv finds out whether the library's conversion function from type src_id to type dst_id is * a compiler (hard) conversion. * @@ -12523,13 +13645,15 @@ public class H5 implements java.io.Serializable { * IN: Identifier of destination datatype. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Tcompiler_conv(long src_id, long dst_id) throws HDF5LibraryException; /** - ** H5Tconvert converts nelmts elements from the type specified by the src_id identifier to type dst_id. + * @ingroup JH5T + * + * H5Tconvert converts nelmts elements from the type specified by the src_id identifier to type dst_id. * * @param src_id * IN: Identifier of source datatype. @@ -12545,7 +13669,7 @@ public class H5 implements java.io.Serializable { * IN: Dataset transfer property list identifier. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * buf is null. **/ @@ -12556,6 +13680,8 @@ public class H5 implements java.io.Serializable { // int H5Tconvert(int src_id, int dst_id, long nelmts, Pointer buf, Pointer background, int plist_id); /** + * @ingroup JH5T + * * H5Tcopy copies an existing datatype. The returned type is always transient and unlocked. * * @param type_id @@ -12565,7 +13691,7 @@ public class H5 implements java.io.Serializable { * @return a datatype identifier if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
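H5Tcommit turns a transient datatype into a named datatype stored in the file, and H5Tcommitted reports the new state. A sketch, assuming fid is an open writable file identifier and the link path is only an example:

    // Sketch: commit a copy of the native int type as a named datatype.
    // Assumes: import hdf.hdf5lib.H5; import hdf.hdf5lib.HDF5Constants;
    static void commitNamedType(long fid) throws Exception {
        long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_INT);        // transient copy, safe to commit
        H5.H5Tcommit(fid, "/types/my_int", tid, HDF5Constants.H5P_DEFAULT,
                     HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        System.out.println("committed: " + H5.H5Tcommitted(tid));   // true after the commit
        H5.H5Tclose(tid);
    }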
**/ public static long H5Tcopy(long type_id) throws HDF5LibraryException { @@ -12581,6 +13707,8 @@ public class H5 implements java.io.Serializable { private synchronized static native long _H5Tcopy(long type_id) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tcreate creates a new dataype of the specified class with the specified number of bytes. * * @param tclass @@ -12591,7 +13719,7 @@ public class H5 implements java.io.Serializable { * @return datatype identifier * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static long H5Tcreate(int tclass, long size) throws HDF5LibraryException { @@ -12607,6 +13735,8 @@ public class H5 implements java.io.Serializable { private synchronized static native long _H5Tcreate(int type, long size) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tdecode reconstructs the HDF5 data type object and returns a new object handle for it. * * @param buf @@ -12615,7 +13745,7 @@ public class H5 implements java.io.Serializable { * @return a new object handle * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * buf is null. **/ @@ -12634,6 +13764,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5T + * * H5Tdetect_class determines whether the datatype specified in dtype_id contains any datatypes of the * datatype class specified in dtype_class. * @@ -12645,12 +13777,14 @@ public class H5 implements java.io.Serializable { * @return true if the datatype specified in dtype_id contains any datatypes of the datatype class * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native boolean H5Tdetect_class(long type_id, int cls) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tencode converts a data type description into binary form in a buffer. * * @param obj_id @@ -12664,7 +13798,7 @@ public class H5 implements java.io.Serializable { * @return the size needed for the allocated buffer. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * buf is null. **/ @@ -12672,6 +13806,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; // /** + // * @ingroup JH5T + // * // * H5Tencode converts a data type description into binary form in a buffer. // * // * @param obj_id @@ -12680,12 +13816,14 @@ public class H5 implements java.io.Serializable { // * @return the buffer for the object to be encoded into. // * // * @exception HDF5LibraryException - // * Error from the HDF-5 Library. + // * Error from the HDF5 Library. // **/ // public synchronized static native byte[] H5Tencode(int obj_id) - // throws HDF5LibraryException; + // throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tenum_create creates a new enumeration datatype based on the specified base datatype, parent_id, * which must be an integer type. * @@ -12695,7 +13833,7 @@ public class H5 implements java.io.Serializable { * @return the datatype identifier for the new enumeration datatype * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
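H5Tcreate is the entry point for container classes such as compounds. A sketch of a two-field record type; it also relies on H5Tget_size and H5Tinsert, which are documented elsewhere in this class, so treat those calls as assumptions:

    // Sketch: a compound datatype holding an int "id" followed by a double "value".
    // Assumes: import hdf.hdf5lib.H5; import hdf.hdf5lib.HDF5Constants;
    static long makeRecordType() throws Exception {
        long isize = H5.H5Tget_size(HDF5Constants.H5T_NATIVE_INT);
        long dsize = H5.H5Tget_size(HDF5Constants.H5T_NATIVE_DOUBLE);
        long ctype = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, isize + dsize);
        H5.H5Tinsert(ctype, "id",    0,     HDF5Constants.H5T_NATIVE_INT);
        H5.H5Tinsert(ctype, "value", isize, HDF5Constants.H5T_NATIVE_DOUBLE);
        return ctype; // caller releases with H5Tclose
    }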
**/ public static long H5Tenum_create(long base_id) throws HDF5LibraryException { @@ -12711,6 +13849,8 @@ public class H5 implements java.io.Serializable { private synchronized static native long _H5Tenum_create(long base_id) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tenum_insert inserts a new enumeration datatype member into an enumeration datatype. * * @param type @@ -12721,7 +13861,7 @@ public class H5 implements java.io.Serializable { * IN: The value of the member, data of the correct type * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -12729,6 +13869,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5T + * * H5Tenum_insert inserts a new enumeration datatype member into an enumeration datatype. * * @param type @@ -12741,7 +13883,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -12752,6 +13894,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5T + * * H5Tenum_insert inserts a new enumeration datatype member into an enumeration datatype. * * @param type @@ -12764,7 +13908,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -12779,6 +13923,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5T + * * H5Tenum_nameof finds the symbol name that corresponds to the specified value of the enumeration * datatype type. * @@ -12792,7 +13938,7 @@ public class H5 implements java.io.Serializable { * @return the symbol name. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * value is null. **/ @@ -12802,6 +13948,8 @@ public class H5 implements java.io.Serializable { // int H5Tenum_nameof(int type, Pointer value, Buffer name/* out */, long size); /** + * @ingroup JH5T + * * H5Tenum_nameof finds the symbol name that corresponds to the specified value of the enumeration * datatype type. * @@ -12817,7 +13965,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -12831,6 +13979,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5T + * * H5Tenum_valueof finds the value that corresponds to the specified name of the enumeration datatype * type. * @@ -12842,12 +13992,14 @@ public class H5 implements java.io.Serializable { * OUT: The value of the member * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Tenum_valueof(long type, String name, byte[] value) throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5T + * * H5Tenum_valueof finds the value that corresponds to the specified name of the enumeration datatype * type. 
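The enumeration calls above pair an integer base type with named members. A sketch using the int convenience overload of H5Tenum_insert (the choice of that overload, and the member names, are assumptions):

    // Sketch: a small enum datatype over the native int base type.
    // Assumes: import hdf.hdf5lib.H5; import hdf.hdf5lib.HDF5Constants;
    static long makeColorEnum() throws Exception {
        long etype = H5.H5Tenum_create(HDF5Constants.H5T_NATIVE_INT);
        H5.H5Tenum_insert(etype, "RED",   0);
        H5.H5Tenum_insert(etype, "GREEN", 1);
        H5.H5Tenum_insert(etype, "BLUE",  2);
        return etype; // caller releases with H5Tclose
    }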
* @@ -12861,7 +14013,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * name is null. **/ @@ -12875,6 +14027,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5T + * * H5Tequal determines whether two datatype identifiers refer to the same datatype. * * @param type_id1 @@ -12885,12 +14039,14 @@ public class H5 implements java.io.Serializable { * @return true if the datatype identifiers refer to the same datatype, else false. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native boolean H5Tequal(long type_id1, long type_id2) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tget_array_dims returns the sizes of the dimensions of the specified array datatype object. * * @param type_id @@ -12901,7 +14057,7 @@ public class H5 implements java.io.Serializable { * @return the non-negative number of dimensions of the array type * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * dims is null. **/ @@ -12912,6 +14068,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5T + * * H5Tget_array_dims2 returns the sizes of the dimensions of the specified array datatype object. * * @param type_id @@ -12922,7 +14080,7 @@ public class H5 implements java.io.Serializable { * @return the non-negative number of dimensions of the array type * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * dims is null. **/ @@ -12930,6 +14088,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5T + * * H5Tget_array_ndims returns the rank, the number of dimensions, of an array datatype object. * * @param type_id @@ -12938,11 +14098,13 @@ public class H5 implements java.io.Serializable { * @return the rank of the array * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Tget_array_ndims(long type_id) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tget_class returns the datatype class identifier. * * @param type_id @@ -12951,11 +14113,13 @@ public class H5 implements java.io.Serializable { * @return datatype class identifier if successful; otherwise H5T_NO_CLASS(-1). * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Tget_class(long type_id) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tget_class_name returns the datatype class identifier. * * @param class_id @@ -12996,6 +14160,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5T + * * H5Tget_create_plist returns a property list identifier for the datatype creation property list * associated with the datatype specified by type_id. * @@ -13005,7 +14171,7 @@ public class H5 implements java.io.Serializable { * @return a datatype property list identifier. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
**/ public static long H5Tget_create_plist(long type_id) throws HDF5LibraryException { @@ -13021,6 +14187,8 @@ public class H5 implements java.io.Serializable { private synchronized static native long _H5Tget_create_plist(long type_id) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tget_cset retrieves the character set type of a string datatype. * * @param type_id @@ -13029,11 +14197,13 @@ public class H5 implements java.io.Serializable { * @return a valid character set type if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Tget_cset(long type_id) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tset_cset the character set to be used. * * @param type_id @@ -13044,11 +14214,13 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Tset_cset(long type_id, int cset) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tget_ebias retrieves the exponent bias of a floating-point type. * * @param type_id @@ -13057,11 +14229,13 @@ public class H5 implements java.io.Serializable { * @return the bias if successful; otherwise 0. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Tget_ebias(long type_id) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tset_ebias sets the exponent bias of a floating-point type. * * @param type_id @@ -13072,7 +14246,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static int H5Tset_ebias(long type_id, int ebias) throws HDF5LibraryException { @@ -13081,6 +14255,8 @@ public class H5 implements java.io.Serializable { } /** + * @ingroup JH5T + * * H5Tget_ebias retrieves the exponent bias of a floating-point type. * * @param type_id @@ -13089,11 +14265,13 @@ public class H5 implements java.io.Serializable { * @return the bias * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5Tget_ebias_long(long type_id) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tset_ebias sets the exponent bias of a floating-point type. * * @param type_id @@ -13102,11 +14280,13 @@ public class H5 implements java.io.Serializable { * IN: Exponent bias value. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Tset_ebias(long type_id, long ebias) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tget_fields retrieves information about the locations of the various bit fields of a floating point * datatype. * @@ -13123,7 +14303,7 @@ public class H5 implements java.io.Serializable { *

* * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * fields is null. * @exception IllegalArgumentException @@ -13133,6 +14313,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException, IllegalArgumentException; /** + * @ingroup JH5T + * * H5Tget_fields retrieves information about the locations of the various bit fields of a floating point * datatype. * @@ -13141,7 +14323,7 @@ public class H5 implements java.io.Serializable { * @param fields * OUT: location of size and bit-position. * - *
+     * 
      *      fields[0] = spos  OUT: location to return size of in bits.
      *      fields[1] = epos  OUT: location to return exponent bit-position.
      *      fields[2] = esize OUT: location to return size of exponent in bits.
@@ -13152,7 +14334,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            fields is null.
      * @exception IllegalArgumentException
@@ -13168,6 +14350,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException, IllegalArgumentException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tset_fields sets the locations and sizes of the various floating point bit fields.
      *
      * @param type_id
@@ -13184,12 +14368,14 @@ public class H5 implements java.io.Serializable {
      *            IN: Size of mantissa in bits.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native void H5Tset_fields(long type_id, long spos, long epos, long esize,
                                                          long mpos, long msize) throws HDF5LibraryException;
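For orientation only (not part of this patch), a minimal sketch of how the H5Tget_fields/H5Tset_fields wrappers above might be used together; the imports, the long[5] layout {spos, epos, esize, mpos, msize}, and the 16-bit float layout values are assumptions for the example:
    // Assumes: import hdf.hdf5lib.H5;  import hdf.hdf5lib.HDF5Constants;
    long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_FLOAT); // writable copy of a 32-bit float
    long[] fields = new long[5];                            // {spos, epos, esize, mpos, msize}
    H5.H5Tget_fields(tid, fields);                          // read the current bit-field layout
    // Reshape the copy into a 16-bit float: sign bit 15, 5 exponent bits at 10, 10 mantissa bits at 0.
    H5.H5Tset_fields(tid, 15, 10, 5, 0, 10);
    H5.H5Tset_precision(tid, 16);
    H5.H5Tset_size(tid, 2);
    H5.H5Tset_ebias(tid, 15);
    H5.H5Tclose(tid);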
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tset_fields sets the locations and sizes of the various floating point bit fields.
      *
      * @param type_id
@@ -13208,7 +14394,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static int H5Tset_fields(long type_id, int spos, int epos, int esize, int mpos, int msize)
         throws HDF5LibraryException
@@ -13218,6 +14404,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_inpad retrieves the internal padding type for unused bits in floating-point datatypes.
      *
      * @param type_id
@@ -13226,11 +14414,13 @@ public class H5 implements java.io.Serializable {
      * @return a valid padding type if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tget_inpad(long type_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * If any internal bits of a floating point type are unused (that is, those significant bits which are
      * not part of the sign, exponent, or mantissa), then they will be filled according to the value of the
      * padding value property inpad, which H5Tset_inpad sets.
@@ -13243,11 +14433,13 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tset_inpad(long type_id, int inpad) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_member_class returns the class of datatype of the specified member.
      *
      * @param type_id
@@ -13258,12 +14450,14 @@ public class H5 implements java.io.Serializable {
      * @return the class of the datatype of the field if successful;
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tget_member_class(long type_id, int membno)
         throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_member_index retrieves the index of a field of a compound datatype.
      *
      * @param type_id
@@ -13274,12 +14468,14 @@ public class H5 implements java.io.Serializable {
      * @return if field is defined, the index; else negative.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tget_member_index(long type_id, String field_name)
         throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_member_name retrieves the name of a field of a compound datatype or an element of an enumeration
      * datatype.
      *
@@ -13291,14 +14487,16 @@ public class H5 implements java.io.Serializable {
      * @return a valid pointer to the name if successful; otherwise null.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native String H5Tget_member_name(long type_id, int field_idx)
         throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_member_offset returns the byte offset of the specified member of the compound datatype. This is
-     * the byte offset in the HDF-5 file/library, NOT the offset of any Java object which might be mapped to
+     * the byte offset in the HDF5 file/library, NOT the offset of any Java object which might be mapped to
      * this data item.
      *
      * @param type_id
@@ -13311,6 +14509,8 @@ public class H5 implements java.io.Serializable {
     public synchronized static native long H5Tget_member_offset(long type_id, int membno);
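As a sketch only (not part of this patch), the member query calls documented here can be combined to walk a compound datatype; dtype_id is assumed to identify an already-opened compound datatype:
    int nmembers = H5.H5Tget_nmembers(dtype_id);
    for (int i = 0; i < nmembers; i++) {
        String name  = H5.H5Tget_member_name(dtype_id, i);
        long  offset = H5.H5Tget_member_offset(dtype_id, i);  // byte offset inside the HDF5 record
        long  mtid   = H5.H5Tget_member_type(dtype_id, i);    // copy of the member's datatype
        System.out.println(name + " at byte " + offset + ", size " + H5.H5Tget_size(mtid));
        H5.H5Tclose(mtid);                                     // release the copied member type
    }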
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_member_type returns the datatype of the specified member.
      *
      * @param type_id
@@ -13321,7 +14521,7 @@ public class H5 implements java.io.Serializable {
      * @return the identifier of a copy of the datatype of the field if successful;
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static long H5Tget_member_type(long type_id, int field_idx) throws HDF5LibraryException
     {
@@ -13338,6 +14538,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_member_value returns the value of the enumeration datatype member memb_no.
      *
      * @param type_id
@@ -13348,7 +14550,7 @@ public class H5 implements java.io.Serializable {
      *            OUT: The value of the member
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            value is null.
      **/
@@ -13356,6 +14558,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_member_value returns the value of the enumeration datatype member memb_no.
      *
      * @param type_id
@@ -13368,7 +14572,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            value is null.
      **/
@@ -13382,6 +14586,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
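A brief illustrative sketch (not part of the patch) of the enumeration calls documented above; the member names and the use of the int-valued H5Tenum_insert overload are assumptions for the example:
    long etid = H5.H5Tenum_create(HDF5Constants.H5T_NATIVE_INT); // enum with a native int base
    H5.H5Tenum_insert(etid, "RED", 0);
    H5.H5Tenum_insert(etid, "GREEN", 1);
    int[] value = new int[1];
    H5.H5Tget_member_value(etid, 1, value);                      // value[0] == 1 for "GREEN"
    String name = H5.H5Tget_member_name(etid, 1);                // "GREEN"
    H5.H5Tclose(etid);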
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_native_type returns the equivalent native datatype for the datatype specified in type_id.
      *
      * @param type_id
@@ -13391,7 +14597,7 @@ public class H5 implements java.io.Serializable {
      * @return the native datatype identifier for the specified dataset datatype.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static long H5Tget_native_type(long type_id) throws HDF5LibraryException
     {
@@ -13399,6 +14605,8 @@ public class H5 implements java.io.Serializable {
     }
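A minimal sketch (not part of the patch) of H5Tget_native_type as typically used after querying a dataset's on-disk type; dataset_id and the H5Dget_type call are assumed to come from the dataset interface documented elsewhere in this file:
    long file_type   = H5.H5Dget_type(dataset_id);        // on-disk datatype of the dataset
    long native_type = H5.H5Tget_native_type(file_type);  // equivalent in-memory (native) type
    long elem_size   = H5.H5Tget_size(native_type);       // bytes to allocate per element
    H5.H5Tclose(native_type);
    H5.H5Tclose(file_type);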
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_native_type returns the equivalent native datatype for the datatype specified in type_id.
      *
      * @param type_id
@@ -13409,7 +14617,7 @@ public class H5 implements java.io.Serializable {
      * @return the native datatype identifier for the specified dataset datatype.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static long H5Tget_native_type(long type_id, int direction) throws HDF5LibraryException
     {
@@ -13426,6 +14634,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_nmembers retrieves the number of fields a compound datatype has.
      *
      * @param type_id
@@ -13434,11 +14644,13 @@ public class H5 implements java.io.Serializable {
      * @return the number of members the datatype has if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tget_nmembers(long type_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_norm retrieves the mantissa normalization of a floating-point datatype.
      *
      * @param type_id
@@ -13447,11 +14659,13 @@ public class H5 implements java.io.Serializable {
      * @return a valid normalization type if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tget_norm(long type_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tset_norm sets the mantissa normalization of a floating-point datatype.
      *
      * @param type_id
@@ -13462,11 +14676,13 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tset_norm(long type_id, int norm) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_offset retrieves the bit offset of the first significant bit.
      *
      * @param type_id
@@ -13475,11 +14691,13 @@ public class H5 implements java.io.Serializable {
      * @return a positive offset value if successful; otherwise 0.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tget_offset(long type_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tset_offset sets the bit offset of the first significant bit.
      *
      * @param type_id
@@ -13490,7 +14708,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static int H5Tset_offset(long type_id, int offset) throws HDF5LibraryException
     {
@@ -13499,6 +14717,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tset_offset sets the bit offset of the first significant bit.
      *
      * @param type_id
@@ -13507,12 +14727,14 @@ public class H5 implements java.io.Serializable {
      *            IN: Offset of first significant bit.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native void H5Tset_offset(long type_id, long offset)
         throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_order returns the byte order of an atomic datatype.
      *
      * @param type_id
@@ -13521,11 +14743,13 @@ public class H5 implements java.io.Serializable {
      * @return a byte order constant if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tget_order(long type_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tset_order sets the byte ordering of an atomic datatype.
      *
      * @param type_id
@@ -13536,11 +14760,13 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tset_order(long type_id, int order) throws HDF5LibraryException;
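A short sketch (not part of the patch) of the byte-order getter/setter above, assuming the usual HDF5Constants order constants:
    long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_INT);
    if (H5.H5Tget_order(tid) != HDF5Constants.H5T_ORDER_BE)
        H5.H5Tset_order(tid, HDF5Constants.H5T_ORDER_BE);  // store big-endian regardless of host order
    H5.H5Tclose(tid);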
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_pad retrieves the padding type of the least and most-significant bit padding.
      *
      * @param type_id
@@ -13556,7 +14782,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            pad is null.
      **/
@@ -13564,6 +14790,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tset_pad sets the least and most-significant bits padding types.
      *
      * @param type_id
@@ -13576,12 +14804,14 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tset_pad(long type_id, int lsb, int msb)
         throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_precision returns the precision of an atomic datatype.
      *
      * @param type_id
@@ -13590,11 +14820,13 @@ public class H5 implements java.io.Serializable {
      * @return the number of significant bits if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tget_precision(long type_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tset_precision sets the precision of an atomic datatype.
      *
      * @param type_id
@@ -13605,7 +14837,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static int H5Tset_precision(long type_id, int precision) throws HDF5LibraryException
     {
@@ -13614,6 +14846,8 @@ public class H5 implements java.io.Serializable {
     }
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_precision returns the precision of an atomic datatype.
      *
      * @param type_id
@@ -13622,11 +14856,13 @@ public class H5 implements java.io.Serializable {
      * @return the number of significant bits if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native long H5Tget_precision_long(long type_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tset_precision sets the precision of an atomic datatype.
      *
      * @param type_id
@@ -13635,12 +14871,14 @@ public class H5 implements java.io.Serializable {
      *            IN: Number of bits of precision for datatype.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native void H5Tset_precision(long type_id, long precision)
         throws HDF5LibraryException;
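A sketch (not part of the patch) combining the precision and offset setters to describe a packed bit field; the 12-bit width at bit offset 4 is an arbitrary example layout:
    long tid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_INT); // 32-bit container
    H5.H5Tset_precision(tid, 12L);                        // only 12 bits are significant
    H5.H5Tset_offset(tid, 4);                             // ...starting at bit 4 of the container
    H5.H5Tclose(tid);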
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_sign retrieves the sign type for an integer type.
      *
      * @param type_id
@@ -13649,11 +14887,13 @@ public class H5 implements java.io.Serializable {
      * @return a valid sign type if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tget_sign(long type_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tset_sign sets the sign property for an integer type.
      *
      * @param type_id
@@ -13664,11 +14904,13 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tset_sign(long type_id, int sign) throws HDF5LibraryException;
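A one-line sketch (not part of the patch) of the sign setter above, producing an unsigned 32-bit integer type with the usual HDF5Constants members:
    long utid = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_INT);
    H5.H5Tset_sign(utid, HDF5Constants.H5T_SGN_NONE);  // unsigned: no sign bit
    H5.H5Tclose(utid);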
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_size returns the size of a datatype in bytes.
      *
      * @param type_id
@@ -13677,11 +14919,13 @@ public class H5 implements java.io.Serializable {
      * @return the size of the datatype in bytes
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native long H5Tget_size(long type_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tset_size sets the total size in bytes, size, for an atomic datatype (this operation is not permitted
      * on compound datatypes).
      *
@@ -13693,11 +14937,13 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tset_size(long type_id, long size) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_strpad retrieves the string padding method for a string datatype.
      *
      * @param type_id
@@ -13706,11 +14952,13 @@ public class H5 implements java.io.Serializable {
      * @return a valid string padding type if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tget_strpad(long type_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tset_strpad defines the storage mechanism for the string.
      *
      * @param type_id
@@ -13721,11 +14969,13 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tset_strpad(long type_id, int strpad) throws HDF5LibraryException;
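A sketch (not part of the patch) of the string-related setters above, building a fixed-length, null-terminated UTF-8 string type; the 32-byte length is an arbitrary choice:
    long stid = H5.H5Tcopy(HDF5Constants.H5T_C_S1);           // 1-byte C string prototype
    H5.H5Tset_size(stid, 32);                                  // fixed 32-byte strings
    H5.H5Tset_strpad(stid, HDF5Constants.H5T_STR_NULLTERM);   // pad/terminate with NUL
    H5.H5Tset_cset(stid, HDF5Constants.H5T_CSET_UTF8);        // interpret bytes as UTF-8
    H5.H5Tclose(stid);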
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_super returns the type from which TYPE is derived.
      *
      * @param type
@@ -13734,7 +14984,7 @@ public class H5 implements java.io.Serializable {
      * @return the parent type
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public static long H5Tget_super(long type) throws HDF5LibraryException
     {
@@ -13750,6 +15000,8 @@ public class H5 implements java.io.Serializable {
     private synchronized static native long _H5Tget_super(long type) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tget_tag returns the tag associated with datatype type_id.
      *
      * @param type
@@ -13758,11 +15010,13 @@ public class H5 implements java.io.Serializable {
      * @return the tag
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native String H5Tget_tag(long type) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tset_tag tags an opaque datatype type_id with a unique ASCII identifier tag.
      *
      * @param type
@@ -13773,11 +15027,13 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tset_tag(long type, String tag) throws HDF5LibraryException;
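A sketch (not part of the patch) of H5Tset_tag/H5Tget_tag on an opaque type; the 16-byte element size and the tag string are arbitrary example values:
    long otid = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, 16);  // 16 uninterpreted bytes per element
    H5.H5Tset_tag(otid, "MD5 digest");                       // label what those bytes mean
    String tag = H5.H5Tget_tag(otid);                        // "MD5 digest"
    H5.H5Tclose(otid);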
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tinsert adds another member to the compound datatype type_id.
      *
      * @param type_id
@@ -13792,7 +15048,7 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -13800,6 +15056,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
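A sketch (not part of the patch) of H5Tcreate/H5Tinsert building a compound type; the field names and the packed offsets (8-byte double plus 4-byte int on a typical platform) are assumptions for the example:
    long ctid = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, 12);
    H5.H5Tinsert(ctid, "temperature", 0, HDF5Constants.H5T_NATIVE_DOUBLE);
    H5.H5Tinsert(ctid, "count", 8, HDF5Constants.H5T_NATIVE_INT);
    H5.H5Tclose(ctid);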
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tis_variable_str determines whether the datatype identified in type_id is a variable-length string.
      *
      * @param type_id
@@ -13808,11 +15066,13 @@ public class H5 implements java.io.Serializable {
      * @return true if type_id is a variable-length string.
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native boolean H5Tis_variable_str(long type_id) throws HDF5LibraryException;
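A sketch (not part of the patch) showing when H5Tis_variable_str returns true; H5T_VARIABLE is the usual HDF5Constants member:
    long vtid = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
    H5.H5Tset_size(vtid, HDF5Constants.H5T_VARIABLE);  // switch to a variable-length string
    boolean isVlen = H5.H5Tis_variable_str(vtid);       // true
    H5.H5Tclose(vtid);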
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tlock locks the datatype specified by the type_id identifier, making it read-only and
      * non-destructible.
      *
@@ -13822,11 +15082,13 @@ public class H5 implements java.io.Serializable {
      * @return a non-negative value if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      **/
     public synchronized static native int H5Tlock(long type_id) throws HDF5LibraryException;
 
     /**
+     * @ingroup JH5T
+     *
      * H5Topen opens a named datatype at the location specified by loc_id and returns an identifier for the
      * datatype.
      *
@@ -13840,7 +15102,7 @@ public class H5 implements java.io.Serializable {
      * @return a named datatype identifier if successful
      *
      * @exception HDF5LibraryException
-     *            Error from the HDF-5 Library.
+     *            Error from the HDF5 Library.
      * @exception NullPointerException
      *            name is null.
      **/
@@ -13860,6 +15122,8 @@ public class H5 implements java.io.Serializable {
         throws HDF5LibraryException, NullPointerException;
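A sketch (not part of the patch) of H5Topen on a committed (named) datatype; file_id and the path "/shared_dtype" are assumptions for the example:
    long tid = H5.H5Topen(file_id, "/shared_dtype", HDF5Constants.H5P_DEFAULT);
    int  cls = H5.H5Tget_class(tid);  // e.g. HDF5Constants.H5T_COMPOUND
    H5.H5Tclose(tid);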
 
     /**
+     * @ingroup JH5T
+     *
      * H5Tpack recursively removes padding from within a compound datatype to make it more efficient
      * (space-wise) to store that data. 

WARNING: This call only affects the C-data, even if it * succeeds, there may be no visible effect on Java objects. @@ -13870,11 +15134,13 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Tpack(long type_id) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Treclaim reclaims buffer used for VL data. * * @param type_id @@ -13887,7 +15153,7 @@ public class H5 implements java.io.Serializable { * Buffer with data to be reclaimed. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. * @exception NullPointerException * buf is null. **/ @@ -13896,6 +15162,8 @@ public class H5 implements java.io.Serializable { throws HDF5LibraryException, NullPointerException; /** + * @ingroup JH5T + * * H5Tvlen_create creates a new variable-length (VL) dataype. * * @param base_id @@ -13904,7 +15172,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public static long H5Tvlen_create(long base_id) throws HDF5LibraryException { @@ -13920,6 +15188,8 @@ public class H5 implements java.io.Serializable { private synchronized static native long _H5Tvlen_create(long base_id) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Tflush causes all buffers associated with a committed datatype to be immediately flushed to disk * without removing the data from the cache. * @@ -13927,11 +15197,13 @@ public class H5 implements java.io.Serializable { * IN: Identifier of the committed datatype to be flushed. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Tflush(long dtype_id) throws HDF5LibraryException; /** + * @ingroup JH5T + * * H5Trefresh causes all buffers associated with a committed datatype to be cleared and immediately * re-loaded with updated contents from disk. This function essentially closes the datatype, evicts * all metadata associated with it from the cache, and then re-opens the datatype. The reopened datatype @@ -13941,7 +15213,7 @@ public class H5 implements java.io.Serializable { * IN: Identifier of the committed datatype to be refreshed. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5Trefresh(long dtype_id) throws HDF5LibraryException; @@ -13961,9 +15233,13 @@ public class H5 implements java.io.Serializable { // H5VL: VOL Interface Functions // // // // //////////////////////////////////////////////////////////// + /** + * @defgroup JH5VL Java VOL Connector (H5VL) Interface + **/ - /// VOL Connector Functionality /** + * @ingroup JH5VL + * * H5VLregister_connector_by_name registers a new VOL connector as a member of the virtual object layer * class. * @@ -13976,11 +15252,13 @@ public class H5 implements java.io.Serializable { * @return a VOL connector ID * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. 
**/ public synchronized static native long H5VLregister_connector_by_name(String connector_name, long vipl_id) throws HDF5LibraryException; /** + * @ingroup JH5VL + * * H5VLregister_connector_by_value registers a new VOL connector as a member of the virtual object layer * class. * @@ -13993,11 +15271,13 @@ public class H5 implements java.io.Serializable { * @return a VOL connector ID * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5VLregister_connector_by_value(int connector_value, long vipl_id) throws HDF5LibraryException; /** + * @ingroup JH5VL + * * H5VLis_connector_registered_by_name tests whether a VOL class has been registered. * * @param name @@ -14006,11 +15286,13 @@ public class H5 implements java.io.Serializable { * @return true if a VOL connector with that name has been registered * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native boolean H5VLis_connector_registered_by_name(String name) throws HDF5LibraryException; /** + * @ingroup JH5VL + * * H5VLis_connector_registered_by_value tests whether a VOL class has been registered. * * @param connector_value @@ -14019,11 +15301,13 @@ public class H5 implements java.io.Serializable { * @return true if a VOL connector with that value has been registered * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native boolean H5VLis_connector_registered_by_value(int connector_value) throws HDF5LibraryException; /** + * @ingroup JH5VL + * * H5VLget_connector_id retrieves the ID for a registered VOL connector for a given object. * * @param object_id @@ -14032,10 +15316,12 @@ public class H5 implements java.io.Serializable { * @return a VOL connector ID * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5VLget_connector_id(long object_id) throws HDF5LibraryException; /** + * @ingroup JH5VL + * * H5VLget_connector_id_by_name retrieves the ID for a registered VOL connector. * * @param name @@ -14044,11 +15330,13 @@ public class H5 implements java.io.Serializable { * @return a VOL connector ID * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5VLget_connector_id_by_name(String name) throws HDF5LibraryException; /** + * @ingroup JH5VL + * * H5VLget_connector_id_by_value retrieves the ID for a registered VOL connector. * * @param connector_value @@ -14057,11 +15345,13 @@ public class H5 implements java.io.Serializable { * @return a VOL connector ID * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native long H5VLget_connector_id_by_value(int connector_value) throws HDF5LibraryException; /** + * @ingroup JH5VL + * * H5VLget_connector_name returns the connector name for the VOL associated with the * object or file ID. * @@ -14071,28 +15361,32 @@ public class H5 implements java.io.Serializable { * @return the connector name * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native String H5VLget_connector_name(long object_id) throws HDF5LibraryException; /** + * @ingroup JH5VL + * * H5VLclose closes a VOL connector ID. 
* * @param connector_id * IN: Identifier of the connector. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5VLclose(long connector_id) throws HDF5LibraryException; /** + * @ingroup JH5VL + * * H5VLunregister_connector removes a VOL connector ID from the library. * * @param connector_id * IN: Identifier of the connector. * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native void H5VLunregister_connector(long connector_id) throws HDF5LibraryException; @@ -14105,8 +15399,13 @@ public class H5 implements java.io.Serializable { // H5Z: Filter Interface Functions // // // // //////////////////////////////////////////////////////////// + /** + * @defgroup JH5Z Java Filter (H5Z) Interface + **/ /** + * @ingroup JH5Z + * * H5Zfilter_avail checks if a filter is available. * * @param filter @@ -14115,11 +15414,13 @@ public class H5 implements java.io.Serializable { * @return a non-negative(TRUE/FALSE) value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Zfilter_avail(int filter) throws HDF5LibraryException; /** + * @ingroup JH5Z + * * H5Zget_filter_info gets information about a pipeline data filter. * * @param filter @@ -14128,11 +15429,13 @@ public class H5 implements java.io.Serializable { * @return the filter information flags * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Zget_filter_info(int filter) throws HDF5LibraryException; /** + * @ingroup JH5Z + * * H5Zunregister unregisters a filter. * * @param filter @@ -14141,7 +15444,7 @@ public class H5 implements java.io.Serializable { * @return a non-negative value if successful * * @exception HDF5LibraryException - * Error from the HDF-5 Library. + * Error from the HDF5 Library. **/ public synchronized static native int H5Zunregister(int filter) throws HDF5LibraryException; diff --git a/java/src/hdf/hdf5lib/HDF5Constants.java b/java/src/hdf/hdf5lib/HDF5Constants.java index 2f5ec91..67b9449 100644 --- a/java/src/hdf/hdf5lib/HDF5Constants.java +++ b/java/src/hdf/hdf5lib/HDF5Constants.java @@ -16,14 +16,15 @@ package hdf.hdf5lib; import hdf.hdf5lib.structs.H5O_token_t; /** + * @page HDF5CONST Constants and Enumerated Types * This class contains C constants and enumerated types of HDF5 library. The - * values of these constants are obtained from the library by calling J2C(int - * jconstant), where jconstant is any of the private constants which start their - * name with "JH5" need to be converted. + * values of these constants are obtained from the library by calling + * the JNI function jconstant, where jconstant is any of the private constants + * which start their name with "H5" need to be converted. *

* Do not edit this file! * - * See also: hdf.hdf5lib.HDF5Library + * @see @ref HDF5LIB */ public class HDF5Constants { static { H5.loadH5Lib(); } @@ -32,8 +33,6 @@ public class HDF5Constants { // Get the HDF5 constants from the library // // ///////////////////////////////////////////////////////////////////////// - // public static final long H5_QUARTER_HADDR_MAX = H5_QUARTER_HADDR_MAX(); - /** Special parameters for szip compression */ public static final int H5_SZIP_MAX_PIXELS_PER_BLOCK = H5_SZIP_MAX_PIXELS_PER_BLOCK(); /** Special parameters for szip compression */ diff --git a/java/src/hdf/hdf5lib/HDF5GroupInfo.java b/java/src/hdf/hdf5lib/HDF5GroupInfo.java deleted file mode 100644 index 220cfb6..0000000 --- a/java/src/hdf/hdf5lib/HDF5GroupInfo.java +++ /dev/null @@ -1,182 +0,0 @@ -/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * - * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * - * All rights reserved. * - * * - * This file is part of HDF5. The full HDF5 copyright notice, including * - * terms governing use, modification, and redistribution, is contained in * - * the COPYING file, which can be found at the root of the source code * - * distribution tree, or in https://www.hdfgroup.org/licenses. * - * If you do not have access to either file, you may request a copy from * - * help@hdfgroup.org. * - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ - -package hdf.hdf5lib; - -/** - *

- * This class is a container for the information reported about an HDF5 Object - * from the H5Gget_obj_info() method. - *

- * The fileno and objno fields contain four values which uniquely identify an - * object among those HDF5 files which are open: if all four values are the same - * between two objects, then the two objects are the same (provided both files - * are still open). The nlink field is the number of hard links to the object or - * zero when information is being returned about a symbolic link (symbolic links - * do not have hard links but all other objects always have at least one). The - * type field contains the type of the object, one of H5G_GROUP, H5G_DATASET, or - * H5G_LINK. The mtime field contains the modification time. If information is - * being returned about a symbolic link then linklen will be the length of the - * link value (the name of the pointed-to object with the null terminator); - * otherwise linklen will be zero. Other fields may be added to this structure - * in the future. - */ - -public class HDF5GroupInfo { - long[] fileno; - long[] objno; - int nlink; - int type; - long mtime; - int linklen; - - /** - * Container for the information reported about an HDF5 Object - * from the H5Gget_obj_info() method - */ - public HDF5GroupInfo() - { - fileno = new long[2]; - objno = new long[2]; - nlink = -1; - type = -1; - mtime = 0; - linklen = 0; - } - - /** - * Sets the HDF5 group information. Used by the JHI5. - * - * @param fn - * File id number - * @param on - * Object id number - * @param nl - * Number of links - * @param t - * Type of the object - * @param mt - * Modification time - * @param len - * Length of link - **/ - public void setGroupInfo(long[] fn, long[] on, int nl, int t, long mt, int len) - { - fileno = fn; - objno = on; - nlink = nl; - type = t; - mtime = mt; - linklen = len; - } - - /** Resets all the group information to defaults. */ - public void reset() - { - fileno[0] = 0; - fileno[1] = 0; - objno[0] = 0; - objno[1] = 0; - nlink = -1; - type = -1; - mtime = 0; - linklen = 0; - } - - /** - * fileno accessors - * @return the file number if successful - */ - public long[] getFileno() { return fileno; } - - /** - * accessors - * @return the object number if successful - */ - public long[] getObjno() { return objno; } - - /** - * accessors - * @return type of group if successful - */ - public int getType() { return type; } - - /** - * accessors - * @return the number of links in the group if successful - */ - public int getNlink() { return nlink; } - - /** - * accessors - * @return the modified time value if successful - */ - public long getMtime() { return mtime; } - - /** - * accessors - * @return a length of link name if successful - */ - public int getLinklen() { return linklen; } - - /** - * The fileno and objno fields contain four values which uniquely identify - * an object among those HDF5 files. - */ - @Override - public boolean equals(Object obj) - { - if (!(obj instanceof HDF5GroupInfo)) { - return false; - } - - HDF5GroupInfo target = (HDF5GroupInfo)obj; - if ((fileno[0] == target.fileno[0]) && (fileno[1] == target.fileno[1]) && - (objno[0] == target.objno[0]) && (objno[1] == target.objno[1])) { - return true; - } - else { - return false; - } - } - - /** - * Returns the object id. - * - * @return the object id - */ - public long getOID() { return objno[0]; } - - /** - * /** Converts this object to a String representation. 
- * - * @return a string representation of this object - */ - @Override - public String toString() - { - String fileStr = "fileno=null"; - String objStr = "objno=null"; - - if (fileno != null) { - fileStr = "fileno[0]=" + fileno[0] + ",fileno[1]=" + fileno[1]; - } - - if (objno != null) { - objStr = "objno[0]=" + objno[0] + ",objno[1]=" + objno[1]; - } - - return getClass().getName() + "[" + fileStr + "," + objStr + ",type=" + type + ",nlink=" + nlink + - ",mtime=" + mtime + ",linklen=" + linklen + "]"; - } -} diff --git a/java/src/hdf/hdf5lib/HDFArray.java b/java/src/hdf/hdf5lib/HDFArray.java index 8525fb0..9ea314d 100644 --- a/java/src/hdf/hdf5lib/HDFArray.java +++ b/java/src/hdf/hdf5lib/HDFArray.java @@ -19,14 +19,15 @@ import hdf.hdf5lib.exceptions.HDF5Exception; import hdf.hdf5lib.exceptions.HDF5JavaException; /** + * @page HDFARRAY Java Array Conversion * This is a class for handling multidimensional arrays for HDF. *

* The purpose is to allow the storage and retrieval of arbitrary array types containing scientific data. *

* The methods support the conversion of an array to and from Java to a one-dimensional array of bytes - * suitable for I/O by the C library.

This class heavily uses the HDFNativeData class to convert between Java and C - * representations. + * suitable for I/O by the C library.

This class heavily uses the + * @ref HDFNATIVE + * class to convert between Java and C representations. */ public class HDFArray { diff --git a/java/src/hdf/hdf5lib/HDFNativeData.java b/java/src/hdf/hdf5lib/HDFNativeData.java index c497043..bc4e866 100644 --- a/java/src/hdf/hdf5lib/HDFNativeData.java +++ b/java/src/hdf/hdf5lib/HDFNativeData.java @@ -17,11 +17,12 @@ import hdf.hdf5lib.exceptions.HDF5Exception; import hdf.hdf5lib.exceptions.HDF5JavaException; /** + * @page HDFNATIVE Native Arrays of Numbers * This class encapsulates native methods to deal with arrays of numbers, * converting from numbers to bytes and bytes to numbers. *

- * These routines are used by class HDFArray to pass data to and from the - * HDF-5 library. + * These routines are used by class @ref HDFARRAY to pass data to and from the + * HDF5 library. *

* Methods xxxToByte() convert a Java array of primitive numbers (int, short, * ...) to a Java array of bytes. Methods byteToXxx() convert from a Java array @@ -30,7 +31,7 @@ import hdf.hdf5lib.exceptions.HDF5JavaException; * Variant interfaces convert a section of an array, and also can convert to * sub-classes of Java Number. *

- * See also: hdf.hdf5lib.HDFArray. + * @see @ref HDFARRAY. */ public class HDFNativeData { diff --git a/java/src/hdf/hdf5lib/callbacks/Callbacks.java b/java/src/hdf/hdf5lib/callbacks/Callbacks.java index 86d6193..3d5fbd1 100644 --- a/java/src/hdf/hdf5lib/callbacks/Callbacks.java +++ b/java/src/hdf/hdf5lib/callbacks/Callbacks.java @@ -13,6 +13,7 @@ package hdf.hdf5lib.callbacks; /** + * @page CALLBACKS HDF5 Java Callbacks Interface * All callback definitions must derive from this interface. Any * derived interfaces must define a single public method named "callback". * You are responsible for deregistering your callback (if necessary) @@ -20,11 +21,14 @@ package hdf.hdf5lib.callbacks; * a callback which has been GC'd, you will likely crash the VM. If * there is no method to deregister the callback (e.g. atexit * in the C library), you must ensure that you always keep a live reference - * to the callback object.

+ * to the callback object. + * * A callback should generally never throw an exception, since it doesn't * necessarily have an encompassing Java environment to catch it. Any * exceptions thrown will be passed to the default callback exception * handler. + * + * @defgroup JCALL HDF5 Library Java Callbacks */ public interface Callbacks { } diff --git a/java/src/hdf/hdf5lib/callbacks/H5A_iterate_cb.java b/java/src/hdf/hdf5lib/callbacks/H5A_iterate_cb.java index 6c68f36..9958b3b 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5A_iterate_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5A_iterate_cb.java @@ -20,6 +20,8 @@ import hdf.hdf5lib.structs.H5A_info_t; */ public interface H5A_iterate_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each attribute * * @param loc_id the ID for the group or dataset being iterated over diff --git a/java/src/hdf/hdf5lib/callbacks/H5D_append_cb.java b/java/src/hdf/hdf5lib/callbacks/H5D_append_cb.java index cf7ada6..49323a2 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5D_append_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5D_append_cb.java @@ -18,6 +18,8 @@ package hdf.hdf5lib.callbacks; */ public interface H5D_append_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each dataset access property list * * @param dataset_id the ID for the dataset being iterated over diff --git a/java/src/hdf/hdf5lib/callbacks/H5D_iterate_cb.java b/java/src/hdf/hdf5lib/callbacks/H5D_iterate_cb.java index 54c12e3..5f77998 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5D_iterate_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5D_iterate_cb.java @@ -18,6 +18,8 @@ package hdf.hdf5lib.callbacks; */ public interface H5D_iterate_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each dataset element * * @param elem the pointer to the element in memory containing the current point diff --git a/java/src/hdf/hdf5lib/callbacks/H5E_walk_cb.java b/java/src/hdf/hdf5lib/callbacks/H5E_walk_cb.java index 5722195..a8ef5df 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5E_walk_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5E_walk_cb.java @@ -20,6 +20,8 @@ import hdf.hdf5lib.structs.H5E_error2_t; */ public interface H5E_walk_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each error stack element * * @param nidx the index of the current error stack element diff --git a/java/src/hdf/hdf5lib/callbacks/H5L_iterate_t.java b/java/src/hdf/hdf5lib/callbacks/H5L_iterate_t.java index 53635bf..7342e58 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5L_iterate_t.java +++ b/java/src/hdf/hdf5lib/callbacks/H5L_iterate_t.java @@ -20,6 +20,8 @@ import hdf.hdf5lib.structs.H5L_info_t; */ public interface H5L_iterate_t extends Callbacks { /** + * @ingroup JCALL + * * application callback for each group * * @param loc_id the ID for the group being iterated over diff --git a/java/src/hdf/hdf5lib/callbacks/H5O_iterate_t.java b/java/src/hdf/hdf5lib/callbacks/H5O_iterate_t.java index ecf868c..bfe8c67 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5O_iterate_t.java +++ b/java/src/hdf/hdf5lib/callbacks/H5O_iterate_t.java @@ -20,6 +20,8 @@ import hdf.hdf5lib.structs.H5O_info_t; */ public interface H5O_iterate_t extends Callbacks { /** + * @ingroup JCALL + * * application callback for each group * * @param loc_id the ID for the group or dataset being iterated over diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_cb.java index 0a09a94..a235861 100644 --- 
a/java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_cb.java @@ -18,6 +18,8 @@ package hdf.hdf5lib.callbacks; */ public interface H5P_cls_close_func_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each property list * * @param prop_id the ID for the property list class being iterated over diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_cb.java index 53f86be..b218e0c 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_cb.java @@ -18,6 +18,8 @@ package hdf.hdf5lib.callbacks; */ public interface H5P_cls_copy_func_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each property list * * @param new_prop_id the ID for the property list copy diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_cls_create_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_cls_create_func_cb.java index 8f4e782..3d407d0 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5P_cls_create_func_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5P_cls_create_func_cb.java @@ -18,6 +18,8 @@ package hdf.hdf5lib.callbacks; */ public interface H5P_cls_create_func_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each property list * * @param prop_id the ID for the property list class being iterated over diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_iterate_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_iterate_cb.java index db98a67..51a5768 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5P_iterate_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5P_iterate_cb.java @@ -18,6 +18,8 @@ package hdf.hdf5lib.callbacks; */ public interface H5P_iterate_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each property list * * @param plist the ID for the property list being iterated over diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_prp_close_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_prp_close_func_cb.java index 1aa7ce4..2ddc980 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5P_prp_close_func_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5P_prp_close_func_cb.java @@ -18,6 +18,8 @@ package hdf.hdf5lib.callbacks; */ public interface H5P_prp_close_func_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each property list * * @param name the name of the property being closed diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_prp_compare_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_prp_compare_func_cb.java index 49cef7d..53caa94 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5P_prp_compare_func_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5P_prp_compare_func_cb.java @@ -18,6 +18,8 @@ package hdf.hdf5lib.callbacks; */ public interface H5P_prp_compare_func_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each property list * * @param value1 the value of the first property being compared diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_prp_copy_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_prp_copy_func_cb.java index f4924ee..0b2349e 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5P_prp_copy_func_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5P_prp_copy_func_cb.java @@ -18,6 +18,8 @@ package hdf.hdf5lib.callbacks; */ public interface H5P_prp_copy_func_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each property list * * @param name the name of the property being 
copied diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_prp_create_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_prp_create_func_cb.java index bce024b..6065ce0 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5P_prp_create_func_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5P_prp_create_func_cb.java @@ -18,6 +18,8 @@ package hdf.hdf5lib.callbacks; */ public interface H5P_prp_create_func_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each property list * * @param name the name of the property list being created diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_prp_delete_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_prp_delete_func_cb.java index 8c5dccc..4384ca7 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5P_prp_delete_func_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5P_prp_delete_func_cb.java @@ -18,6 +18,8 @@ package hdf.hdf5lib.callbacks; */ public interface H5P_prp_delete_func_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each property list * * @param prop_id the ID of the property list the property is deleted from diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_prp_get_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_prp_get_func_cb.java index 0f3457f..999c7b0 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5P_prp_get_func_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5P_prp_get_func_cb.java @@ -18,6 +18,8 @@ package hdf.hdf5lib.callbacks; */ public interface H5P_prp_get_func_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each property list * * @param prop_id the ID for the property list being queried diff --git a/java/src/hdf/hdf5lib/callbacks/H5P_prp_set_func_cb.java b/java/src/hdf/hdf5lib/callbacks/H5P_prp_set_func_cb.java index a55ca3a..893344b 100644 --- a/java/src/hdf/hdf5lib/callbacks/H5P_prp_set_func_cb.java +++ b/java/src/hdf/hdf5lib/callbacks/H5P_prp_set_func_cb.java @@ -18,6 +18,8 @@ package hdf.hdf5lib.callbacks; */ public interface H5P_prp_set_func_cb extends Callbacks { /** + * @ingroup JCALL + * * application callback for each property list * * @param prop_id the ID for the property list being modified diff --git a/java/src/hdf/hdf5lib/callbacks/package-info.java b/java/src/hdf/hdf5lib/callbacks/package-info.java index 114045c..5ef3fab 100644 --- a/java/src/hdf/hdf5lib/callbacks/package-info.java +++ b/java/src/hdf/hdf5lib/callbacks/package-info.java @@ -12,6 +12,7 @@ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ /** + * @page CALLBACKS_UG HDF5 Java Callbacks Interface * All callback definitions must derive from the Callbacks interface. Any * derived interfaces must define a single public method named "callback". * You are responsible for deregistering your callback (if necessary) diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5AtomException.java b/java/src/hdf/hdf5lib/exceptions/HDF5AtomException.java index a1473c4..280db7d 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5AtomException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5AtomException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_ATOM + * This sub-class represents HDF5 major error code H5E_ATOM */ public class HDF5AtomException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5AtomException with no specified detail * message. */ public HDF5AtomException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5AtomException with the specified detail * message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5AttributeException.java b/java/src/hdf/hdf5lib/exceptions/HDF5AttributeException.java index 4cb7b1d..f8b526e 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5AttributeException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5AttributeException.java @@ -16,16 +16,20 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_ATTR + * This sub-class represents HDF5 major error code H5E_ATTR */ public class HDF5AttributeException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5AttributeException with no specified * detail message. */ public HDF5AttributeException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5AttributeException with the specified * detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5BtreeException.java b/java/src/hdf/hdf5lib/exceptions/HDF5BtreeException.java index 9f70456..71b8e47 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5BtreeException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5BtreeException.java @@ -16,16 +16,20 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_BTREE + * This sub-class represents HDF5 major error code H5E_BTREE */ public class HDF5BtreeException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5BtreeException with no specified detail * message. */ public HDF5BtreeException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5BtreeException with the specified detail * message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5DataFiltersException.java b/java/src/hdf/hdf5lib/exceptions/HDF5DataFiltersException.java index b4397b7..a837708 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5DataFiltersException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5DataFiltersException.java @@ -16,16 +16,20 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_PLINE + * This sub-class represents HDF5 major error code H5E_PLINE */ public class HDF5DataFiltersException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5DataFiltersException with no specified * detail message. */ public HDF5DataFiltersException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5DataFiltersException with the specified * detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5DataStorageException.java b/java/src/hdf/hdf5lib/exceptions/HDF5DataStorageException.java index f6993a8..d9f49da 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5DataStorageException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5DataStorageException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_STORAGE + * This sub-class represents HDF5 major error code H5E_STORAGE */ public class HDF5DataStorageException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5DataStorageException with no specified * detail message. */ public HDF5DataStorageException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5DataStorageException with the specified * detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5DatasetInterfaceException.java b/java/src/hdf/hdf5lib/exceptions/HDF5DatasetInterfaceException.java index 8fd4ae9..fea1346 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5DatasetInterfaceException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5DatasetInterfaceException.java @@ -16,16 +16,20 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_DATASET + * This sub-class represents HDF5 major error code H5E_DATASET */ public class HDF5DatasetInterfaceException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5DatasetInterfaceException with no * specified detail message. */ public HDF5DatasetInterfaceException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5DatasetInterfaceException with the * specified detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5DataspaceInterfaceException.java b/java/src/hdf/hdf5lib/exceptions/HDF5DataspaceInterfaceException.java index d0d2a09..e2d29d0 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5DataspaceInterfaceException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5DataspaceInterfaceException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_DATASPACE + * This sub-class represents HDF5 major error code H5E_DATASPACE */ public class HDF5DataspaceInterfaceException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5DataspaceInterfaceException with no * specified detail message. */ public HDF5DataspaceInterfaceException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5DataspaceInterfaceException with the * specified detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5DatatypeInterfaceException.java b/java/src/hdf/hdf5lib/exceptions/HDF5DatatypeInterfaceException.java index 2ab4ff9..d7e678b 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5DatatypeInterfaceException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5DatatypeInterfaceException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_DATATYPE + * This sub-class represents HDF5 major error code H5E_DATATYPE */ public class HDF5DatatypeInterfaceException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5DatatypeInterfaceException with no * specified detail message. */ public HDF5DatatypeInterfaceException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5DatatypeInterfaceException with the * specified detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5Exception.java b/java/src/hdf/hdf5lib/exceptions/HDF5Exception.java index b098a12..ad42644 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5Exception.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5Exception.java @@ -14,21 +14,23 @@ package hdf.hdf5lib.exceptions; /** - *

+ * @page ERRORS Errors and Exceptions * The class HDF5Exception returns errors from the Java HDF5 Interface. - *

+ * * Two sub-classes of HDF5Exception are defined: *

    *
  1. - * HDF5LibraryException -- errors raised the HDF5 library code + * HDF5LibraryException -- errors raised by the HDF5 library code *
  2. - * HDF5JavaException -- errors raised the HDF5 Java wrapper code + * HDF5JavaException -- errors raised by the HDF5 Java wrapper code *
- *

+ * * These exceptions are sub-classed to represent specific error conditions, as * needed. In particular, HDF5LibraryException has a sub-class for each major * error code returned by the HDF5 library. * + * @defgroup JERR HDF5 Library Exception Interface + * */ public class HDF5Exception extends RuntimeException { /** @@ -37,12 +39,16 @@ public class HDF5Exception extends RuntimeException { protected String detailMessage; /** + * @ingroup JERR + * * Constructs an HDF5Exception with no specified detail * message. */ public HDF5Exception() { super(); } /** + * @ingroup JERR + * * Constructs an HDF5Exception with the specified detail * message. * @@ -56,6 +62,8 @@ public class HDF5Exception extends RuntimeException { } /** + * @ingroup JERR + * * Returns the detail message of this exception * * @return the detail message or null if this object does not diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5ExternalFileListException.java b/java/src/hdf/hdf5lib/exceptions/HDF5ExternalFileListException.java index c8df3d0..f9f49a1 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5ExternalFileListException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5ExternalFileListException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_EFL + * This sub-class represents HDF5 major error code H5E_EFL */ public class HDF5ExternalFileListException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5ExternalFileListException with no * specified detail message. */ public HDF5ExternalFileListException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5ExternalFileListException with the * specified detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5FileInterfaceException.java b/java/src/hdf/hdf5lib/exceptions/HDF5FileInterfaceException.java index afd6d69..3ebe63a 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5FileInterfaceException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5FileInterfaceException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_FILE + * This sub-class represents HDF5 major error code H5E_FILE */ public class HDF5FileInterfaceException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5FileInterfaceException with no specified * detail message. */ public HDF5FileInterfaceException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5FileInterfaceException with the specified * detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5FunctionArgumentException.java b/java/src/hdf/hdf5lib/exceptions/HDF5FunctionArgumentException.java index 58e2980..3dc0c72 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5FunctionArgumentException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5FunctionArgumentException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_ARGS + * This sub-class represents HDF5 major error code H5E_ARGS */ public class HDF5FunctionArgumentException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5FunctionArgumentException with no * specified detail message. */ public HDF5FunctionArgumentException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5FunctionArgumentException with the * specified detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5FunctionEntryExitException.java b/java/src/hdf/hdf5lib/exceptions/HDF5FunctionEntryExitException.java index db46aae..aa9289c 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5FunctionEntryExitException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5FunctionEntryExitException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_FUNC + * This sub-class represents HDF5 major error code H5E_FUNC */ public class HDF5FunctionEntryExitException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5FunctionEntryExitException with no * specified detail message. */ public HDF5FunctionEntryExitException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5FunctionEntryExitException with the * specified detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5HeapException.java b/java/src/hdf/hdf5lib/exceptions/HDF5HeapException.java index 7f1691d..ba1b5ad 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5HeapException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5HeapException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_HEAP + * This sub-class represents HDF5 major error code H5E_HEAP */ public class HDF5HeapException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5HeapException with no specified detail * message. */ public HDF5HeapException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5HeapException with the specified detail * message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5InternalErrorException.java b/java/src/hdf/hdf5lib/exceptions/HDF5InternalErrorException.java index 4489486..31efe56 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5InternalErrorException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5InternalErrorException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_INTERNAL + * This sub-class represents HDF5 major error code H5E_INTERNAL */ public class HDF5InternalErrorException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5InternalErrorException with no specified * detail message. */ public HDF5InternalErrorException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5InternalErrorException with the specified * detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5JavaException.java b/java/src/hdf/hdf5lib/exceptions/HDF5JavaException.java index ae1cf85..9b38b87 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5JavaException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5JavaException.java @@ -14,21 +14,27 @@ package hdf.hdf5lib.exceptions; /** - *

+ * @page ERRORSJAVA Java Wrapper Errors and Exceptions * The class HDF5JavaException returns errors from the Java wrapper of the HDF5 * library. *

* These errors include Java configuration errors, security violations, and * resource exhaustion. + * + * @defgroup JERRJAVA HDF5 Library Java Exception Interface */ public class HDF5JavaException extends HDF5Exception { /** + * @ingroup JERRJAVA + * * Constructs an HDF5JavaException with no specified detail * message. */ public HDF5JavaException() { super(); } /** + * @ingroup JERRJAVA + * * Constructs an HDF5JavaException with the specified detail * message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java b/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java index 6628cf5..3de2a28 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java @@ -17,14 +17,16 @@ import hdf.hdf5lib.H5; import hdf.hdf5lib.HDF5Constants; /** - *

+ * @page ERRORSLIB HDF5 Library Errors and Exceptions * The class HDF5LibraryException returns errors raised by the HDF5 library. - *

- * Each major error code from the HDF-5 Library is represented by a sub-class of + * + * Each major error code from the HDF5 Library is represented by a sub-class of * this class, and by default the 'detailedMessage' is set according to the - * minor error code from the HDF-5 Library. + * minor error code from the HDF5 Library. *

- * For major and minor error codes, see H5Epublic.h in the HDF-5 library. + * For major and minor error codes, @see @ref H5E in the HDF5 library. + * + * @defgroup JERRLIB HDF5 Library JNI Exception Interface * */ @@ -36,6 +38,8 @@ public class HDF5LibraryException extends HDF5Exception { private final long minorErrorNumber; /** + * @ingroup JERRLIB + * * Constructs an HDF5LibraryException with no specified detail * message. */ @@ -43,7 +47,7 @@ public class HDF5LibraryException extends HDF5Exception { { super(); - // this code forces the loading of the HDF-5 library + // this code forces the loading of the HDF5 library // to assure that the native methods are available try { H5.H5open(); @@ -57,6 +61,8 @@ public class HDF5LibraryException extends HDF5Exception { } /** + * @ingroup JERRLIB + * * Constructs an HDF5LibraryException with the specified detail * message. * @@ -66,7 +72,7 @@ public class HDF5LibraryException extends HDF5Exception { public HDF5LibraryException(String s) { super(s); - // this code forces the loading of the HDF-5 library + // this code forces the loading of the HDF5 library // to assure that the native methods are available try { H5.H5open(); @@ -78,6 +84,8 @@ public class HDF5LibraryException extends HDF5Exception { } /** + * @ingroup JERRLIB + * * Get the major error number of the first error on the HDF5 library error * stack. * @@ -87,6 +95,8 @@ public class HDF5LibraryException extends HDF5Exception { private native long _getMajorErrorNumber(); /** + * @ingroup JERRLIB + * * Get the minor error number of the first error on the HDF5 library error * stack. * @@ -96,9 +106,11 @@ public class HDF5LibraryException extends HDF5Exception { private native long _getMinorErrorNumber(); /** + * @ingroup JERRLIB + * * Return a error message for the minor error number. - *

- * These messages come from H5Epublic.h. + * + * These messages come from @ref H5E. * * @param err_code * the error code @@ -349,19 +361,23 @@ public class HDF5LibraryException extends HDF5Exception { } /** - * Prints this HDF5LibraryException, the HDF-5 Library error + * @ingroup JERRLIB + * + * Prints this HDF5LibraryException, the HDF5 Library error * stack, and and the Java stack trace to the standard error stream. */ @Override public void printStackTrace() { System.err.println(this); - printStackTrace0(null); // the HDF-5 Library error stack + printStackTrace0(null); // the HDF5 Library error stack super.printStackTrace(); // the Java stack trace } /** - * Prints this HDF5LibraryException the HDF-5 Library error + * @ingroup JERRLIB + * + * Prints this HDF5LibraryException the HDF5 Library error * stack, and and the Java stack trace to the specified print stream. * * @param f @@ -382,14 +398,14 @@ public class HDF5LibraryException extends HDF5Exception { catch (Exception ex) { System.err.println(this); }; - // the HDF-5 Library error stack + // the HDF5 Library error stack printStackTrace0(f.getPath()); super.printStackTrace(); // the Java stack trace } } /* - * This private method calls the HDF-5 library to extract the error codes + * This private method calls the HDF5 library to extract the error codes * and error stack. */ private native void printStackTrace0(String s); diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5LowLevelIOException.java b/java/src/hdf/hdf5lib/exceptions/HDF5LowLevelIOException.java index fef5721..719748e 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5LowLevelIOException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5LowLevelIOException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_IO + * This sub-class represents HDF5 major error code H5E_IO */ public class HDF5LowLevelIOException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5LowLevelIOException with no specified * detail message. */ public HDF5LowLevelIOException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5LowLevelIOException with the specified * detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5MetaDataCacheException.java b/java/src/hdf/hdf5lib/exceptions/HDF5MetaDataCacheException.java index 4f00006..298d8b8 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5MetaDataCacheException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5MetaDataCacheException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_CACHE + * This sub-class represents HDF5 major error code H5E_CACHE */ public class HDF5MetaDataCacheException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5MetaDataCacheException with no specified * detail message. */ public HDF5MetaDataCacheException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5MetaDataCacheException with the specified * detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5ObjectHeaderException.java b/java/src/hdf/hdf5lib/exceptions/HDF5ObjectHeaderException.java index 9675354..b6e94be 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5ObjectHeaderException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5ObjectHeaderException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_OHDR + * This sub-class represents HDF5 major error code H5E_OHDR */ public class HDF5ObjectHeaderException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5ObjectHeaderException with no specified * detail message. */ public HDF5ObjectHeaderException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5ObjectHeaderException with the specified * detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5PropertyListInterfaceException.java b/java/src/hdf/hdf5lib/exceptions/HDF5PropertyListInterfaceException.java index 66f0bd1..68d581f 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5PropertyListInterfaceException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5PropertyListInterfaceException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_PLIST + * This sub-class represents HDF5 major error code H5E_PLIST */ public class HDF5PropertyListInterfaceException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5PropertyListInterfaceException with no * specified detail message. */ public HDF5PropertyListInterfaceException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5PropertyListInterfaceException with the * specified detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5ReferenceException.java b/java/src/hdf/hdf5lib/exceptions/HDF5ReferenceException.java index 4feaba7..4c96136 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5ReferenceException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5ReferenceException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_REFERENCE + * This sub-class represents HDF5 major error code H5E_REFERENCE */ public class HDF5ReferenceException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5ReferenceException with no specified * detail message. */ public HDF5ReferenceException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5ReferenceException with the specified * detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5ResourceUnavailableException.java b/java/src/hdf/hdf5lib/exceptions/HDF5ResourceUnavailableException.java index 1a007e7..f920c53 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5ResourceUnavailableException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5ResourceUnavailableException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_RESOURCE + * This sub-class represents HDF5 major error code H5E_RESOURCE */ public class HDF5ResourceUnavailableException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5ResourceUnavailableException with no * specified detail message. */ public HDF5ResourceUnavailableException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5FunctionArgumentException with the * specified detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/HDF5SymbolTableException.java b/java/src/hdf/hdf5lib/exceptions/HDF5SymbolTableException.java index 4fb8c2e..5d3aa90 100644 --- a/java/src/hdf/hdf5lib/exceptions/HDF5SymbolTableException.java +++ b/java/src/hdf/hdf5lib/exceptions/HDF5SymbolTableException.java @@ -16,17 +16,21 @@ package hdf.hdf5lib.exceptions; /** * The class HDF5LibraryException returns errors raised by the HDF5 library. *

- * This sub-class represents HDF-5 major error code H5E_SYM + * This sub-class represents HDF5 major error code H5E_SYM */ public class HDF5SymbolTableException extends HDF5LibraryException { /** + * @ingroup JERRLIB + * * Constructs an HDF5SymbolTableException with no specified * detail message. */ public HDF5SymbolTableException() { super(); } /** + * @ingroup JERRLIB + * * Constructs an HDF5SymbolTableException with the specified * detail message. * diff --git a/java/src/hdf/hdf5lib/exceptions/package-info.java b/java/src/hdf/hdf5lib/exceptions/package-info.java index 8640ccb..2ac7806 100644 --- a/java/src/hdf/hdf5lib/exceptions/package-info.java +++ b/java/src/hdf/hdf5lib/exceptions/package-info.java @@ -12,6 +12,7 @@ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ /** + * @page ERRORS_UG Errors and Exceptions *

* The package exceptions contains error classes for the Java HDF5 Interface. *

diff --git a/java/src/hdf/hdf5lib/package-info.java b/java/src/hdf/hdf5lib/package-info.java index c04b862..7ae4df9 100644 --- a/java/src/hdf/hdf5lib/package-info.java +++ b/java/src/hdf/hdf5lib/package-info.java @@ -12,10 +12,11 @@ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ /** + @page HDF5LIB_UG HDF5 Java Package * This package is the Java interface for the HDF5 library. *

- * This code is the called by Java programs to access the entry points of the HDF5 library. Each routine wraps - a single + * This code is called by Java programs to access the entry points of the HDF5 library. + * Each routine wraps a single * HDF5 entry point, generally with the arguments and return codes analogous to the C interface. *

* For details of the HDF5 library, see the HDF5 Documentation at: @@ -25,14 +26,13 @@ * Mapping of arguments for Java * *

- * In general, arguments to the HDF Java API are straightforward translations from the 'C' API described in - the HDF - * Reference Manual. + * In general, arguments to the HDF Java API are straightforward translations from the 'C' API described + * in the HDF Reference Manual. * * - * + * * - * + * * * * @@ -60,28 +60,27 @@ * * * - * * * *
HDF-5 C types to Java types HDF5 C types to Java types
HDF-5HDF5Java
java.lang.String
void *
+ *
void *
* (i.e., pointer to `Any')
Special -- see HDFArray
* General Rules for Passing Arguments and Results *

- * In general, arguments passed IN to Java are the analogous basic types, as above. The exception is - for arrays, - * which are discussed below. + * In general, arguments passed IN to Java are the analogous basic types, as above. The exception + * is for arrays, which are discussed below. *

* The return value of Java methods is also the analogous type, as above. A major exception to that - rule is that + * rule is that * all HDF functions that return SUCCEED/FAIL are declared boolean in the Java version, rather than - int as + * int as * in the C. Functions that return a value or else FAIL are declared the equivalent to the C function. - However, in most + * However, in most * cases the Java method will raise an exception instead of returning an error code. - * See Errors and Exceptions below. + * @see @ref ERRORS. *

* Java does not support pass by reference of arguments, so arguments that are returned through OUT - parameters + * parameters * must be wrapped in an object or array. The Java API for HDF consistently wraps arguments in arrays. *

* For instance, a function that returns two integers is declared: @@ -104,12 +103,12 @@ * *

* All the routines where this convention is used will have specific documentation of the details, given - below. + * below. *

* Arrays *

* HDF5 needs to read and write multi-dimensional arrays of any number type (and records). The HDF5 API - describes the + * describes the * layout of the source and destination, and the data for the array passed as a block of bytes, for instance, * *

@@ -118,52 +117,52 @@
  *
  * 

* where ``void *'' means that the data may be any valid numeric type, and is a contiguous block of bytes that - is the + * is the * data for a multi-dimensional array. The other parameters describe the dimensions, rank, and datatype of the - array on + * array on * disk (source) and in memory (destination). *

* For Java, this ``ANY'' is a problem, as the type of data must always be declared. Furthermore, - multidimensional + * multidimensional * arrays are definitely not laid out contiguously in memory. It would be infeasible to declare a - separate + * separate * routine for every combination of number type and dimensionality. For that reason, the - * HDFArray class is used to discover the type, shape, and - size of the + * HDFArray class is used to discover the type, shape, and + * size of the * data array at run time, and to convert to and from a contiguous array of bytes in synchronized static - native C order. + * native C order. *

* The upshot is that any Java array of numbers (either primitive or sub-classes of type Number) can be - passed as + * passed as * an ``Object'', and the Java API will translate to and from the appropriate packed array of bytes needed by - the C + * the C * library. So the function above would be declared: * *

  * public synchronized static native int H5Dread(long fid, long filetype, long memtype, long memspace, Object
- data);
+ * data);
  * 
* OPEN_IDS.addElement(id); * and the parameter data can be any multi-dimensional array of numbers, such as float[][], or - int[][][], or + * int[][][], or * Double[][]. *

- * HDF-5 Constants + * HDF5 Constants *

- * The HDF-5 API defines a set of constants and enumerated values. Most of these values are available to Java - programs - * via the class HDF5Constants. For example, the - parameters for + * The HDF5 API defines a set of constants and enumerated values. Most of these values are available to Java + * programs + * via the class HDF5Constants. For example, the + * parameters for * the h5open() call include two numeric values, HDFConstants.H5F_ACC_RDWR and * HDF5Constants.H5P_DEFAULT. As would be expected, these numbers correspond to the C constants * H5F_ACC_RDWR and H5P_DEFAULT. *

- * The HDF-5 API defines a set of values that describe number types and sizes, such as "H5T_NATIVE_INT" and - "hsize_t". - * These values are determined at run time by the HDF-5 C library. To support these parameters, the Java class - * HDF5CDataTypes looks up the values when initiated. - The values + * The HDF5 API defines a set of values that describe number types and sizes, such as "H5T_NATIVE_INT" and + * "hsize_t". + * These values are determined at run time by the HDF5 C library. To support these parameters, the Java class + * HDF5CDataTypes looks up the values when initiated. + * The values * can be accessed as public variables of the Java class, such as: * *

@@ -175,31 +174,30 @@
  * 

* Error handling and Exceptions *

- * The HDF5 error API (H5E) manages the behavior of the error stack in the HDF-5 library. This API is - available from the + * The HDF5 error API (H5E) manages the behavior of the error stack in the HDF5 library. This API is + * available from the * JHI5. Errors are converted into Java exceptions. This is totally different from the C interface, but is - very natural + * very natural * for Java programming. *

* The exceptions of the JHI5 are organized as sub-classes of the class - * HDF5Exception. There are two subclasses - of - * HDF5Exception, - HDF5LibraryException - * and HDF5JavaException. The - sub-classes of the - * former represent errors from the HDF-5 C library, while sub-classes of the latter represent errors in the - JHI5 - * wrapper and support code. - *

- * The super-class HDF5LibraryException implements the method 'printStackTrace()', - which - * prints out the HDF-5 error stack, as described in the HDF-5 C API H5Eprint(). This may be - used by Java - * exception handlers to print out the HDF-5 error stack. + * HDF5Exception. There are two subclasses + * of + * HDF5Exception, @ref ERRORSLIB HDF5LibraryException + * and @ref ERRORSJAVA HDF5JavaException. + * The sub-classes of the former represent errors from the HDF5 C library, + * while sub-classes of the latter represent errors in the JHI5 wrapper and support code. + *

+ * The super-class HDF5LibraryException implements the method + * 'printStackTrace()', which prints out the HDF5 error stack, as described + * in the HDF5 C API @ref H5Eprint(). This may be + * used by Java + * exception handlers to print out the HDF5 error stack. *


* - * See also: http://hdfgroup.org/HDF5" + * @ref HDF5LIB + * + * @see: HDF5" * */ package hdf.hdf5lib; diff --git a/java/src/hdf/overview.html b/java/src/hdf/overview.html index f181510..e059606 100644 --- a/java/src/hdf/overview.html +++ b/java/src/hdf/overview.html @@ -4,7 +4,7 @@

What it is

The Java HDF5 Interface (JHI5) is a Java package -(hdf.hdf5lib) +(hdf.hdf5lib) that ``wraps around'' the HDF5 library.

There are a large number of functions in the HDF5 library (version 1.12). Some of the functions are not supported in JHI5. Most @@ -32,7 +32,7 @@ library contains C functions which implement the native methods. The C functions call the standard HDF5 library, which is linked as part of the same library on most platforms.

The central part of the JHI5 is the Java class -hdf.hdf5lib.H5. +hdf.hdf5lib.H5. The H5 class calls the standard (i.e., `native' code) HDF5 library, with native methods for most of the HDF5 functions. @@ -42,11 +42,11 @@ The JHI5 is used by Java classes to call the HDF5 library, in order to create HDF5 files, and read and write data in existing HDF5 files.

For example, the HDF5 library has the function H5Fopen to open an HDF5 file. The Java interface is the class -hdf.hdf5lib.H5, +hdf.hdf5lib.H5, which has a method:

static native int H5Fopen(String filename, int flags, int access );
The native method is implemented in C using the -Java +Java Native Method Interface (JNI). This is written something like the following:
JNIEXPORT jlong
 JNICALL Java_hdf_hdf5lib_H5_H5Fopen
diff --git a/java/src/jni/exceptionImp.c b/java/src/jni/exceptionImp.c
index a772ae2..1d27e1b 100644
--- a/java/src/jni/exceptionImp.c
+++ b/java/src/jni/exceptionImp.c
@@ -153,7 +153,7 @@ Java_hdf_hdf5lib_H5_H5error_1on(JNIEnv *env, jclass clss)
  * Method:    printStackTrace0
  * Signature: (Ljava/lang/Object;)V
  *
- *  Call the HDF-5 library to print the HDF-5 error stack to 'file_name'.
+ *  Call the HDF5 library to print the HDF5 error stack to 'file_name'.
  */
 JNIEXPORT void JNICALL
 Java_hdf_hdf5lib_exceptions_HDF5LibraryException_printStackTrace0(JNIEnv *env, jobject obj, jstring file_name)
@@ -187,7 +187,7 @@ done:
  * Method:    _getMajorErrorNumber
  * Signature: ()J
  *
- *  Extract the HDF-5 major error number from the HDF-5 error stack.
+ *  Extract the HDF5 major error number from the HDF5 error stack.
  */
 JNIEXPORT jlong JNICALL
 Java_hdf_hdf5lib_exceptions_HDF5LibraryException__1getMajorErrorNumber(JNIEnv *env, jobject obj)
@@ -211,7 +211,7 @@ Java_hdf_hdf5lib_exceptions_HDF5LibraryException__1getMajorErrorNumber(JNIEnv *e
  * Method:    _getMinorErrorNumber
  * Signature: ()J
  *
- *  Extract the HDF-5 minor error number from the HDF-5 error stack.
+ *  Extract the HDF5 minor error number from the HDF5 error stack.
  */
 JNIEXPORT jlong JNICALL
 Java_hdf_hdf5lib_exceptions_HDF5LibraryException__1getMinorErrorNumber(JNIEnv *env, jobject obj)
@@ -350,10 +350,10 @@ h5raiseException(JNIEnv *env, const char *message, const char *exception)
 } /* end h5raiseException() */
 
 /*
- *  h5libraryError()   determines the HDF-5 major error code
+ *  h5libraryError()   determines the HDF5 major error code
  *  and creates and throws the appropriate sub-class of
  *  HDF5LibraryException().  This routine should be called
- *  whenever a call to the HDF-5 library fails, i.e., when
+ *  whenever a call to the HDF5 library fails, i.e., when
  *  the return is -1.
  *
  *  Note:  This routine never returns from the 'throw',
@@ -436,7 +436,7 @@ done:
 
 /*
  *  defineHDF5LibraryException()  returns the name of the sub-class
- *  which goes with an HDF-5 error code.
+ *  which goes with an HDF5 error code.
  */
 static const char *
 defineHDF5LibraryException(hid_t maj_num)
diff --git a/java/src/jni/exceptionImp.h b/java/src/jni/exceptionImp.h
index 38469df..c7375e7 100644
--- a/java/src/jni/exceptionImp.h
+++ b/java/src/jni/exceptionImp.h
@@ -41,7 +41,7 @@ JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5error_1on(JNIEnv *env, jclass clss)
  * Method:    printStackTrace0
  * Signature: (Ljava/lang/Object;)V
  *
- *  Call the HDF-5 library to print the HDF-5 error stack to 'file_name'.
+ *  Call the HDF5 library to print the HDF5 error stack to 'file_name'.
  */
 JNIEXPORT void JNICALL Java_hdf_hdf5lib_exceptions_HDF5LibraryException_printStackTrace0(JNIEnv *env,
                                                                                          jobject obj,
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index b4f4e6d..8e8856d 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -154,7 +154,11 @@ New Features
 
     Documentation:
     --------------
-    -
+    - Doxygen User Guide documentation is available when configured and generated.
+      The resulting documentation files will be in the share/html subdirectory
+      of the HDF5 install directory.
+
+        (ADB - 2022/08/09)
 
 
 Support for new platforms, languages and compilers
diff --git a/src/H5Amodule.h b/src/H5Amodule.h
index 9f86ddd..e3bfe6f 100644
--- a/src/H5Amodule.h
+++ b/src/H5Amodule.h
@@ -29,30 +29,92 @@
 #define H5_MY_PKG_ERR  H5E_ATTR
 #define H5_MY_PKG_INIT YES
 
-/**\defgroup H5A H5A
+/** \page H5A_UG HDF5 Attributes
  *
- * Use the functions in this module to manage HDF5 attributes.
+ * \section sec_attribute HDF5 Attributes
  *
- * Like HDF5 datasets, HDF5 attributes are array variables which have an element
- * datatype and a shape (dataspace). However, they perform a different function:
- * Attributes decorate other HDF5 objects, and are typically used to
- * represent application metadata. Unlike datasets, the HDF5 library does not
- * support partial I/O operations for attributes and they cannot be compressed
- * or extended.
+ * An HDF5 attribute is a small metadata object describing the nature and/or intended usage of a primary data
+ * object. A primary data object may be a dataset, group, or committed datatype.
  *
+ * \subsection subsec_attribute_intro Introduction
+ *
+ * Attributes are assumed to be very small as data objects go, so storing them as standard HDF5 datasets would
+ * be quite inefficient. HDF5 attributes are therefore managed through a special attributes interface,
+ * \ref H5A, which is designed to easily attach attributes to primary data objects as small datasets
+ * containing metadata information and to minimize storage requirements.
+ *
+ * Consider, as examples of the simplest case, a set of laboratory readings taken under known temperature and
+ * pressure conditions of 18.0 degrees Celsius and 0.5 atmospheres, respectively. The temperature and pressure
+ * stored as attributes of the dataset could be described as the following name/value pairs:
+ *     \li temp=18.0
+ *     \li pressure=0.5
+ *
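+ * As a minimal sketch of how such name/value pairs might be attached (assuming \c dset_id identifies an
+ * already-open dataset; the full sequence of steps is given in the programming model below):
+ * \code
+ * double temp     = 18.0;
+ * double pressure = 0.5;
+ * hid_t  space_id = H5Screate(H5S_SCALAR);
+ * hid_t  attr_id  = H5Acreate2(dset_id, "temp", H5T_NATIVE_DOUBLE, space_id, H5P_DEFAULT, H5P_DEFAULT);
+ * H5Awrite(attr_id, H5T_NATIVE_DOUBLE, &temp);
+ * H5Aclose(attr_id);
+ * attr_id = H5Acreate2(dset_id, "pressure", H5T_NATIVE_DOUBLE, space_id, H5P_DEFAULT, H5P_DEFAULT);
+ * H5Awrite(attr_id, H5T_NATIVE_DOUBLE, &pressure);
+ * H5Aclose(attr_id);
+ * H5Sclose(space_id);
+ * \endcode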
+ * While HDF5 attributes are not standard HDF5 datasets, they have much in common:
+ * \li An attribute has a user-defined dataspace and the included metadata has a user-assigned datatype
+ * \li Metadata can be of any valid HDF5 datatype
+ * \li Attributes are addressed by name
+ *
+ * But there are some very important differences:
+ * \li There is no provision for special storage such as compression or chunking
+ * \li There is no partial I/O or sub-setting capability for attribute data
+ * \li Attributes cannot be shared
+ * \li Attributes cannot have attributes
+ * \li Being small, an attribute is stored in the object header of the object it describes and is thus
+ * attached directly to that object
+ *
+ * \subsection subsec_error_H5A Attribute Function Summaries
+ * @see H5A reference manual
+ *
+ * \subsection subsec_attribute_program Programming Model for Attributes
+ *
+ * The figure below shows the UML model for an HDF5 attribute and its associated dataspace and datatype.
  * 
- * 
+ * 
+ * 
+ * 
+ * 
CreateRead
+ * \image html UML_Attribute.jpg "The UML model for an HDF5 attribute" + *
+ * + * Creating an attribute is similar to creating a dataset. To create an attribute, the application must + * specify the object to which the attribute is attached, the datatype and dataspace of the attribute + * data, and the attribute creation property list. + * + * The following steps are required to create and write an HDF5 attribute: + * \li Obtain the object identifier for the attribute’s primary data object + * \li Define the characteristics of the attribute and specify the attribute creation property list + *
+ *     <ul><li>Define the datatype</li>
+ *     <li>Define the dataspace</li>
+ *     <li>Specify the attribute creation property list</li></ul>
+ * \li Create the attribute + * \li Write the attribute data (optional) + * \li Close the attribute (and datatype, dataspace, and attribute creation property list, if necessary) + * \li Close the primary data object (if appropriate) + * + * The following steps are required to open and read/write an existing attribute. Since HDF5 attributes + * allow no partial I/O, you need specify only the attribute and the attribute’s memory datatype to read it: + * \li Obtain the object identifier for the attribute’s primary data object + * \li Obtain the attribute’s name or index + * \li Open the attribute + * \li Get attribute dataspace and datatype (optional) + * \li Specify the attribute’s memory type + * \li Read and/or write the attribute data + * \li Close the attribute + * \li Close the primary data object (if appropriate) + * + * + * * * * - * + * * * * *
CreateUpdate
* \snippet{lineno} H5A_examples.c create * - * \snippet{lineno} H5A_examples.c read + * \snippet{lineno} H5A_examples.c update *
UpdateDelete
ReadDelete
- * \snippet{lineno} H5A_examples.c update + * \snippet{lineno} H5A_examples.c read * * \snippet{lineno} H5A_examples.c delete @@ -60,6 +122,266 @@ *
* + * \subsection subsec_attribute_work Working with Attributes + * + * \subsubsection subsubsec_attribute_work_struct The Structure of an Attribute + * + * An attribute has two parts: name and value(s). + * + * HDF5 attributes are sometimes discussed as name/value pairs in the form name=value. + * + * An attribute’s name is a null-terminated ASCII or UTF-8 character string. Each attribute attached to an + * object has a unique name. + * + * The value portion of the attribute contains one or more data elements of the same datatype. + * + * HDF5 attributes have all the characteristics of HDF5 datasets except that there is no partial I/O + * capability. In other words, attributes can be written and read only in full with no sub-setting. + * + * \subsubsection subsubsec_attribute_work_create Creating, Writing, and Reading Attributes + * + * If attributes are used in an HDF5 file, these functions will be employed: \ref H5Acreate, \ref H5Awrite, + * and \ref H5Aread. \ref H5Acreate and \ref H5Awrite are used together to place the attribute in the file. If + * an attribute is to be used and is not currently in memory, \ref H5Aread generally comes into play + * usually in concert with one each of the H5Aget_* and H5Aopen_* functions. + * + * To create an attribute, call H5Acreate: + * \code + * hid_t H5Acreate (hid_t loc_id, const char *name, + * hid_t type_id, hid_t space_id, hid_t create_plist, + * hid_t access_plist) + * \endcode + * loc_id identifies the object (dataset, group, or committed datatype) to which the attribute is to be + * attached. name, type_id, space_id, and create_plist convey, respectively, the attribute’s name, datatype, + * dataspace, and attribute creation property list. The attribute’s name must be locally unique: it must be + * unique within the context of the object to which it is attached. + * + * \ref H5Acreate creates the attribute in memory. The attribute does not exist in the file until + * \ref H5Awrite writes it there. + * + * To write or read an attribute, call H5Awrite or H5Aread, respectively: + * \code + * herr_t H5Awrite (hid_t attr_id, hid_t mem_type_id, const void *buf) + * herr_t H5Aread (hid_t attr_id, hid_t mem_type_id, void *buf) + * \endcode + * attr_id identifies the attribute while mem_type_id identifies the in-memory datatype of the attribute data. + * + * \ref H5Awrite writes the attribute data from the buffer buf to the file. \ref H5Aread reads attribute data + * from the file into buf. + * + * The HDF5 Library converts the metadata between the in-memory datatype, mem_type_id, and the in-file + * datatype, defined when the attribute was created, without user intervention. + * + * \subsubsection subsubsec_attribute_work_access Accessing Attributes by Name or Index + * + * Attributes can be accessed by name or index value. The use of an index value makes it possible to iterate + * through all of the attributes associated with a given object. + * + * To access an attribute by its name, use the \ref H5Aopen_by_name function. \ref H5Aopen_by_name returns an + * attribute identifier that can then be used by any function that must access an attribute such as \ref + * H5Aread. Use the function \ref H5Aget_name to determine an attribute’s name. + * + * To access an attribute by its index value, use the \ref H5Aopen_by_idx function. To determine an attribute + * index value when it is not already known, use the H5Oget_info function. \ref H5Aopen_by_idx is generally + * used in the course of opening several attributes for later access. 
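+ * For example, a minimal sketch of opening a named attribute and reading it (assuming \c dset_id
+ * identifies the object holding the "temp" attribute from the introduction):
+ * \code
+ * double temp_val;
+ * hid_t  attr_id = H5Aopen_by_name(dset_id, ".", "temp", H5P_DEFAULT, H5P_DEFAULT);
+ * H5Aread(attr_id, H5T_NATIVE_DOUBLE, &temp_val);
+ * H5Aclose(attr_id);
+ * \endcode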
Use \ref H5Aiterate if the intent is to + * perform the same operation on every attribute attached to an object. + * + * \subsubsection subsubsec_attribute_work_info Obtaining Information Regarding an Object’s Attributes + * + * In the course of working with HDF5 attributes, one may need to obtain any of several pieces of information: + * \li An attribute name + * \li The dataspace of an attribute + * \li The datatype of an attribute + * \li The number of attributes attached to an object + * + * To obtain an attribute’s name, call H5Aget_name with an attribute identifier, attr_id: + * \code + * ssize_t H5Aget_name (hid_t attr_id, size_t buf_size, char *buf) + * \endcode + * As with other attribute functions, attr_id identifies the attribute; buf_size defines the size of the + * buffer; and buf is the buffer to which the attribute’s name will be read. + * + * If the length of the attribute name, and hence the value required for buf_size, is unknown, a first call + * to \ref H5Aget_name will return that size. If the value of buf_size used in that first call is too small, + * the name will simply be truncated in buf. A second \ref H5Aget_name call can then be used to retrieve the + * name in an appropriately-sized buffer. + * + * To determine the dataspace or datatype of an attribute, call \ref H5Aget_space or \ref H5Aget_type, + * respectively: \code hid_t H5Aget_space (hid_t attr_id) hid_t H5Aget_type (hid_t attr_id) \endcode \ref + * H5Aget_space returns the dataspace identifier for the attribute attr_id. \ref H5Aget_type returns the + * datatype identifier for the attribute attr_id. + * + * To determine the number of attributes attached to an object, use the \ref H5Oget_info function. The + * function signature is below. \code herr_t H5Oget_info( hid_t object_id, H5O_info_t *object_info ) \endcode + * The number of attributes will be returned in the object_info buffer. This is generally the preferred first + * step in determining attribute index values. If the call returns N, the attributes attached to the object + * object_id have index values of 0 through N-1. + * + * \subsubsection subsubsec_attribute_work_iterate Iterating across an Object’s Attributes + * + * It is sometimes useful to be able to perform the identical operation across all of the attributes attached + * to an object. At the simplest level, you might just want to open each attribute. At a higher level, you + * might wish to perform a rather complex operation on each attribute as you iterate across the set. + * + * To iterate an operation across the attributes attached to an object, one must make a series of calls to + * \ref H5Aiterate + * \code + * herr_t H5Aiterate (hid_t obj_id, H5_index_t index_type, + * H5_iter_order_t order, hsize_t *n, H5A_operator2_t op, + * void *op_data) + * \endcode + * \ref H5Aiterate successively marches across all of the attributes attached to the object specified in + * loc_id, performing the operation(s) specified in op_func with the data specified in op_data on each + * attribute. + * + * When \ref H5Aiterate is called, index contains the index of the attribute to be accessed in this call. When + * \ref H5Aiterate returns, index will contain the index of the next attribute. If the returned index is the + * null pointer, then all attributes have been processed, and the iterative process is complete. + * + * op_func is a user-defined operation that adheres to the \ref H5A_operator_t prototype. 
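+ * As an illustration, a minimal operator matching the versioned #H5A_operator2_t prototype; the
+ * name print_attr_name and the counter passed through op_data are illustrative only:
+ * \code
+ * static herr_t
+ * print_attr_name(hid_t loc_id, const char *attr_name, const H5A_info_t *ainfo, void *op_data)
+ * {
+ *     unsigned *count = (unsigned *)op_data;  /* user data supplied through H5Aiterate */
+ *     printf("attribute %u: %s\n", (*count)++, attr_name);
+ *     return 0;                               /* returning 0 continues the iteration */
+ * }
+ * \endcode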
This prototype and + * certain requirements imposed on the operator’s behavior are described in the \ref H5Aiterate entry in the + * \ref RM. + * + * op_data is also user-defined to meet the requirements of op_func. Beyond providing a parameter with which + * to pass this data, HDF5 provides no tools for its management and imposes no restrictions. + * + * \subsubsection subsubsec_attribute_work_delete Deleting an Attribute + * + * Once an attribute has outlived its usefulness or is no longer appropriate, it may become necessary to + * delete it. + * + * To delete an attribute, call \ref H5Adelete + * \code + * herr_t H5Adelete (hid_t loc_id, const char *name) + * \endcode + * \ref H5Adelete removes the attribute name from the group, dataset, or committed datatype specified in + * loc_id. + * + * \ref H5Adelete must not be called if there are any open attribute identifiers on the object loc_id. Such a + * call can cause the internal attribute indexes to change; future writes to an open attribute would then + * produce unintended results. + * + * \subsubsection subsubsec_attribute_work_close Closing an Attribute + * + * As is the case with all HDF5 objects, once access to an attribute it is no longer needed, that attribute + * must be closed. It is best practice to close it as soon as practicable; it is mandatory that it be closed + * prior to the H5close call closing the HDF5 Library. + * + * To close an attribute, call \ref H5Aclose + * \code + * herr_t H5Aclose (hid_t attr_id) + * \endcode + * \ref H5Aclose closes the specified attribute by terminating access to its identifier, attr_id. + * + * \subsection subsec_attribute_special Special Issues + * + * Some special issues for attributes are discussed below. + * + *

Large Numbers of Attributes Stored in Dense Attribute Storage

+ * + * The dense attribute storage scheme was added in version 1.8 so that datasets, groups, and committed + * datatypes that have large numbers of attributes could be processed more quickly. + * + * Attributes start out being stored in an object's header. This is known as compact storage. For more + * information, see "Storage Strategies." + * + * As the number of attributes grows, attribute-related performance slows. To improve performance, dense + * attribute storage can be initiated with the H5Pset_attr_phase_change function. See the HDF5 Reference + * Manual for more information. + * + * When dense attribute storage is enabled, a threshold is defined for the number of attributes kept in + * compact storage. When the number is exceeded, the library moves all of the attributes into dense storage + * at another location. The library handles the movement of attributes and the pointers between the locations + * automatically. If some of the attributes are deleted so that the number falls below the threshold, then + * the attributes are moved back to compact storage by the library. + * + * The improvements in performance from using dense attribute storage are the result of holding attributes + * in a heap and indexing the heap with a B-tree. + * + * Note that there are some disadvantages to using dense attribute storage. One is that this is a new feature. + * Datasets, groups, and committed datatypes that use dense storage cannot be read by applications built with + * earlier versions of the library. Another disadvantage is that attributes in dense storage cannot be + * compressed. + * + *

Large Attributes Stored in Dense Attribute Storage

+ * + * We generally consider the maximum size of an attribute to be 64K bytes. The library has two ways of storing + * attributes larger than 64K bytes: in dense attribute storage or in a separate dataset. Using dense + * attribute storage is described in this section, and storing in a separate dataset is described in the next + * section. + * + * To use dense attribute storage to store large attributes, set the number of attributes that will be stored + * in compact storage to 0 with the H5Pset_attr_phase_change function. This will force all attributes to be + * put into dense attribute storage and will avoid the 64KB size limitation for a single attribute in compact + * attribute storage. + * + * The example code below illustrates how to create a large attribute that will be kept in dense storage. + * + * + * + * + * + * + *
Create
+ * \snippet{lineno} H5A_examples.c create + *
+ * + *

Large Attributes Stored in a Separate Dataset

+ * + * In addition to dense attribute storage (see above), a large attribute can be stored in a separate dataset. + * In the figure below, DatasetA holds an attribute that is too large for the object header in Dataset1. By + * putting a pointer to DatasetA as an attribute in Dataset1, the attribute becomes available to those + * working with Dataset1. + * This way of handling large attributes can be used in situations where backward compatibility is important + * and where compression is important. Applications built with versions before 1.8.x can read large + * attributes stored in separate datasets. Datasets can be compressed while attributes cannot. + * + * + * + * + *
+ * \image html Shared_Attribute.jpg "A large or shared HDF5 attribute and its associated dataset(s)" + *
+ * Note: In the figure above, DatasetA is an attribute of Dataset1 that is too large to store in Dataset1's + * header. DatasetA is associated with Dataset1 by means of an object reference pointer attached as an + * attribute to Dataset1. The attribute in DatasetA can be shared among multiple datasets by means of + * additional object reference pointers attached to additional datasets. + * + *
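+ * As a minimal sketch of this pattern (the names DatasetA, Dataset1, and the attribute name follow
+ * the figure or are hypothetical; file and dset1 are assumed to be open identifiers, and error
+ * checking is omitted), an object reference to the large dataset is attached to Dataset1 as a small
+ * scalar attribute:
+ * \code
+ * hobj_ref_t ref;
+ * herr_t     status;
+ *
+ * // Create an object reference to the (already existing) large dataset "DatasetA".
+ * status = H5Rcreate(&ref, file, "DatasetA", H5R_OBJECT, -1);
+ *
+ * // Attach the reference to Dataset1 (dset1) as a scalar attribute.
+ * hid_t sid = H5Screate(H5S_SCALAR);
+ * hid_t aid = H5Acreate(dset1, "DatasetA_ref", H5T_STD_REF_OBJ, sid, H5P_DEFAULT, H5P_DEFAULT);
+ * status = H5Awrite(aid, H5T_STD_REF_OBJ, &ref);
+ * H5Aclose(aid);
+ * H5Sclose(sid);
+ * \endcode
+ *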

Shared Attributes

+ * + * Attributes written and managed through the \ref H5A interface cannot be shared. If shared attributes are + * required, they must be handled in the manner described above for large attributes and illustrated in + * the figure above. + * + *

Attribute Names

+ * + * While any ASCII or UTF-8 character may be used in the name given to an attribute, it is usually wise + * to avoid the following kinds of characters: + * \li Commonly used separators or delimiters such as slash, backslash, colon, and semi-colon (\, /, :, ;) + * \li Escape characters + * \li Wild cards such as asterisk and question mark (*, ?) + * NULL can be used within a name, but HDF5 names are terminated with a NULL: whatever comes after the NULL + * will be ignored by HDF5. + * + * The use of ASCII or UTF-8 characters is determined by the character encoding property. See + * #H5Pset_char_encoding in the \ref RM. + * + *
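+ * As an illustrative sketch (dset and space are assumed to be existing dataset and dataspace
+ * identifiers, and the attribute name is hypothetical), the character encoding is selected through an
+ * attribute creation property list:
+ * \code
+ * hid_t  acpl   = H5Pcreate(H5P_ATTRIBUTE_CREATE);
+ * herr_t status = H5Pset_char_encoding(acpl, H5T_CSET_UTF8);
+ *
+ * // The attribute name may now contain any valid UTF-8 characters.
+ * hid_t attr = H5Acreate(dset, "temperature", H5T_NATIVE_FLOAT, space, acpl, H5P_DEFAULT);
+ * H5Aclose(attr);
+ * H5Pclose(acpl);
+ * \endcode
+ *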

No Special I/O or Storage

+ * + * HDF5 attributes have all the characteristics of HDF5 datasets except the following: + * \li Attributes are written and read only in full: there is no provision for partial I/O or sub-setting + * \li No special storage capability is provided for attributes: there is no compression or chunking, and + * attributes are not extendable + * + * Previous Chapter \ref sec_dataspace - Next Chapter \ref sec_error + * + * \defgroup H5A Attributes (H5A) + * + * An HDF5 attribute is a small metadata object describing the nature and/or intended usage of a primary data + * object. A primary data object may be a dataset, group, or committed datatype. + * + * @see sec_attribute + * */ #endif /* H5Amodule_H */ diff --git a/src/H5Dmodule.h b/src/H5Dmodule.h index 596fd48..4ad3709 100644 --- a/src/H5Dmodule.h +++ b/src/H5Dmodule.h @@ -29,7 +29,2961 @@ #define H5_MY_PKG_ERR H5E_DATASET #define H5_MY_PKG_INIT YES -/**\defgroup H5D H5D +/** \page H5D_UG HDF5 Datasets + * + * \section sec_dataset HDF5 Datasets + * + * \subsection subsec_dataset_intro Introduction + * + * An HDF5 dataset is an object composed of a collection of data elements, or raw data, and + * metadata that stores a description of the data elements, data layout, and all other information + * necessary to write, read, and interpret the stored data. From the viewpoint of the application the + * raw data is stored as a one-dimensional or multi-dimensional array of elements (the raw data), + * those elements can be any of several numerical or character types, small arrays, or even + * compound types similar to C structs. The dataset object may have attribute objects. See the + * figure below. + * + * + * + * + * + *
+ * \image html Dsets_fig1.gif "Application view of a dataset" + *
+ * + * A dataset object is stored in a file in two parts: a header and a data array. The header contains + * information that is needed to interpret the array portion of the dataset, as well as metadata (or + * pointers to metadata) that describes or annotates the dataset. Header information includes the + * name of the object, its dimensionality, its number-type, information about how the data itself is + * stored on disk (the storage layout), and other information used by the library to speed up access + * to the dataset or maintain the file’s integrity. + * + * The HDF5 dataset interface, comprising the @ref H5D functions, provides a mechanism for managing + * HDF5 datasets including the transfer of data between memory and disk and the description of + * dataset properties. + * + * A dataset is used by other HDF5 APIs, either by name or by an identifier. For more information, + * \see \ref api-compat-macros. + * + * \subsubsection subsubsec_dataset_intro_link Link/Unlink + * A dataset can be added to a group with one of the H5Lcreate calls, and deleted from a group with + * #H5Ldelete. The link and unlink operations use the name of an object, which may be a dataset. + * The dataset does not have to open to be linked or unlinked. + * + * \subsubsection subsubsec_dataset_intro_obj Object Reference + * A dataset may be the target of an object reference. The object reference is created by + * #H5Rcreate with the name of an object which may be a dataset and the reference type + * #H5R_OBJECT. The dataset does not have to be open to create a reference to it. + * + * An object reference may also refer to a region (selection) of a dataset. The reference is created + * with #H5Rcreate and a reference type of #H5R_DATASET_REGION. + * + * An object reference can be accessed by a call to #H5Rdereference. When the reference is to a + * dataset or dataset region, the #H5Rdereference call returns an identifier to the dataset just as if + * #H5Dopen has been called. + * + * \subsubsection subsubsec_dataset_intro_attr Adding Attributes + * A dataset may have user-defined attributes which are created with #H5Acreate and accessed + * through the @ref H5A API. To create an attribute for a dataset, the dataset must be open, and the + * identifier is passed to #H5Acreate. The attributes of a dataset are discovered and opened using + * #H5Aopen_name, #H5Aopen_idx, or #H5Aiterate; these functions use the identifier of the dataset. + * An attribute can be deleted with #H5Adelete which also uses the identifier of the dataset. + * + * \subsection subsec_dataset_function Dataset Function Summaries + * Functions that can be used with datasets (@ref H5D functions) and property list functions that can + * used with datasets (@ref H5P functions) are listed below. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Dataset functions
FunctionPurpose
#H5DcreateCreates a dataset at the specified location. The + * C function is a macro: \see \ref api-compat-macros.
#H5Dcreate_anonCreates a dataset in a file without linking it into the file structure.
#H5DopenOpens an existing dataset. The C function is a macro: \see \ref api-compat-macros.
#H5DcloseCloses the specified dataset.
#H5Dget_spaceReturns an identifier for a copy of the dataspace for a dataset.
#H5Dget_space_statusDetermines whether space has been allocated for a dataset.
#H5Dget_typeReturns an identifier for a copy of the datatype for a dataset.
#H5Dget_create_plistReturns an identifier for a copy of the dataset creation property list for a dataset.
#H5Dget_access_plistReturns the dataset access property list associated with a dataset.
#H5Dget_offsetReturns the dataset address in a file.
#H5Dget_storage_sizeReturns the amount of storage required for a dataset.
#H5Dvlen_get_buf_sizeDetermines the number of bytes required to store variable-length (VL) data.
#H5Dvlen_reclaimReclaims VL datatype memory buffers.
#H5DreadReads raw data from a dataset into a buffer.
#H5DwriteWrites raw data from a buffer to a dataset.
#H5DiterateIterates over all selected elements in a dataspace.
#H5DgatherGathers data from a selection within a memory buffer.
#H5DscatterScatters data into a selection within a memory buffer.
#H5DfillFills dataspace elements with a fill value in a memory buffer.
#H5Dset_extentChanges the sizes of a dataset’s dimensions.
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Dataset creation property list functions (H5P)
FunctionPurpose
#H5Pset_layoutSets the type of storage used to store the raw data for a dataset.
#H5Pget_layoutReturns the layout of the raw data for a dataset.
#H5Pset_chunkSets the size of the chunks used to store a chunked layout dataset.
#H5Pget_chunkRetrieves the size of chunks for the raw data of a chunked layout dataset.
#H5Pset_deflateSets compression method and compression level.
#H5Pset_fill_valueSets the fill value for a dataset.
#H5Pget_fill_valueRetrieves a dataset fill value.
#H5Pfill_value_definedDetermines whether the fill value is defined.
#H5Pset_fill_timeSets the time when fill values are written to a dataset.
#H5Pget_fill_timeRetrieves the time when fill value are written to a dataset.
#H5Pset_alloc_timeSets the timing for storage space allocation.
#H5Pget_alloc_timeRetrieves the timing for storage space allocation.
#H5Pset_filterAdds a filter to the filter pipeline.
#H5Pall_filters_availVerifies that all required filters are available.
#H5Pget_nfiltersReturns the number of filters in the pipeline.
#H5Pget_filterReturns information about a filter in a pipeline. + * The C function is a macro: \see \ref api-compat-macros.
#H5Pget_filter_by_idReturns information about the specified filter. + * The C function is a macro: \see \ref api-compat-macros.
#H5Pmodify_filterModifies a filter in the filter pipeline.
#H5Premove_filterDeletes one or more filters in the filter pipeline.
#H5Pset_fletcher32Sets up use of the Fletcher32 checksum filter.
#H5Pset_nbitSets up use of the n-bit filter.
#H5Pset_scaleoffsetSets up use of the scale-offset filter.
#H5Pset_shuffleSets up use of the shuffle filter.
#H5Pset_szipSets up use of the Szip compression filter.
#H5Pset_externalAdds an external file to the list of external files.
#H5Pget_external_countReturns the number of external files for a dataset.
#H5Pget_externalReturns information about an external file.
#H5Pset_char_encodingSets the character encoding used to encode a string. Use to set ASCII or UTF-8 character + * encoding for object names.
#H5Pget_char_encodingRetrieves the character encoding used to create a string.
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Dataset access property list functions (H5P)
FunctionPurpose
#H5Pset_bufferSets type conversion and background buffers.
#H5Pget_bufferReads buffer settings.
#H5Pset_chunk_cacheSets the raw data chunk cache parameters.
#H5Pget_chunk_cacheRetrieves the raw data chunk cache parameters.
#H5Pset_edc_checkSets whether to enable error-detection when reading a dataset.
#H5Pget_edc_checkDetermines whether error-detection is enabled for dataset reads.
#H5Pset_filter_callbackSets user-defined filter callback function.
#H5Pset_data_transformSets a data transform expression.
#H5Pget_data_transformRetrieves a data transform expression.
#H5Pset_type_conv_cbSets user-defined datatype conversion callback function.
#H5Pget_type_conv_cbGets user-defined datatype conversion callback function.
#H5Pset_hyper_vector_sizeSets number of I/O vectors to be read/written in hyperslab I/O.
#H5Pget_hyper_vector_sizeRetrieves number of I/O vectors to be read/written in hyperslab I/O.
#H5Pset_btree_ratiosSets B-tree split ratios for a dataset transfer property list.
#H5Pget_btree_ratiosGets B-tree split ratios for a dataset transfer property list.
#H5Pset_vlen_mem_managerSets the memory manager for variable-length datatype allocation in #H5Dread and + * #H5Dvlen_reclaim.
#H5Pget_vlen_mem_managerGets the memory manager for variable-length datatype allocation in #H5Dread and + * #H5Dvlen_reclaim.
#H5Pset_dxpl_mpioSets data transfer mode.
#H5Pget_dxpl_mpioReturns the data transfer mode.
#H5Pset_dxpl_mpio_chunk_optSets a flag specifying linked-chunk I/O or multi-chunk I/O.
#H5Pset_dxpl_mpio_chunk_opt_numSets a numeric threshold for linked-chunk I/O.
#H5Pset_dxpl_mpio_chunk_opt_ratioSets a ratio threshold for collective I/O.
#H5Pset_dxpl_mpio_collective_optSets a flag governing the use of independent versus collective I/O.
#H5Pset_multi_typeSets the type of data property for the MULTI driver.
#H5Pget_multi_typeRetrieves the type of data property for the MULTI driver.
#H5Pset_small_data_block_sizeSets the size of a contiguous block reserved for small data.
#H5Pget_small_data_block_sizeRetrieves the current small data block size setting.
+ * + * \subsection subsec_dataset_program Programming Model for Datasets + * This section explains the programming model for datasets. + * + * \subsubsection subsubsec_dataset_program_general General Model + * + * The programming model for using a dataset has three main phases: + * \li Obtain access to the dataset + * \li Operate on the dataset using the dataset identifier returned at access + * \li Release the dataset + * + * These three phases or steps are described in more detail below the figure. + * + * A dataset may be opened several times and operations performed with several different + * identifiers to the same dataset. All the operations affect the dataset although the calling program + * must synchronize if necessary to serialize accesses. + * + * Note that the dataset remains open until every identifier is closed. The figure below shows the + * basic sequence of operations. + * + * + * + * + * + *
+ * \image html Dsets_fig2.gif "Dataset programming sequence" + *
+ * + * Creation and data access operations may have optional parameters which are set with property + * lists. The general programming model is: + * \li Create property list of appropriate class (dataset create, dataset transfer) + * \li Set properties as needed; each type of property has its own format and datatype + * \li Pass the property list as a parameter of the API call + * + * The steps below describe the programming phases or steps for using a dataset. + *

Step 1. Obtain Access

+ * A new dataset is created by a call to #H5Dcreate. If successful, the call returns an identifier for the + * newly created dataset. + * + * Access to an existing dataset is obtained by a call to #H5Dopen. This call returns an identifier for + * the existing dataset. + * + * An object reference may be dereferenced to obtain an identifier to the dataset it points to. + * + * In each of these cases, the successful call returns an identifier to the dataset. The identifier is + * used in subsequent operations until the dataset is closed. + * + *

Step 2. Operate on the Dataset

+ * The dataset identifier can be used to write and read data to the dataset, to query and set + * properties, and to perform other operations such as adding attributes, linking in groups, and + * creating references. + * + * The dataset identifier can be used for any number of operations until the dataset is closed. + * + *

Step 3. Close the Dataset

+ * When all operations are completed, the dataset identifier should be closed with a call to + * #H5Dclose. This releases the dataset. + * + * After the identifier is closed, it cannot be used for further operations. + * + * \subsubsection subsubsec_dataset_program_create Create Dataset + * + * A dataset is created and initialized with a call to #H5Dcreate. The dataset create operation sets + * permanent properties of the dataset: + * \li Name + * \li Dataspace + * \li Datatype + * \li Storage properties + * + * These properties cannot be changed for the life of the dataset, although the dataspace may be + * expanded up to its maximum dimensions. + * + *

Name

+ * A dataset name is a sequence of alphanumeric ASCII characters. The full name would include a + * tracing of the group hierarchy from the root group of the file. An example is + * /rootGroup/groupA/subgroup23/dataset1. The local name or relative name within the lowest- + * level group containing the dataset would include none of the group hierarchy. An example is + * Dataset1. + * + *

Dataspace

+ * The dataspace of a dataset defines the number of dimensions, the current size of each dimension,
+ * and the maximum size of each dimension. The maximum dimension size can be a fixed value or the constant
+ * #H5S_UNLIMITED, in which case the actual dimension size can be changed with calls to
+ * #H5Dset_extent, up to the maximum set with the maxdims parameter in the #H5Screate_simple
+ * call that established the dataset’s original dimensions. The maximum dimension size is set when
+ * the dataset is created and cannot be changed.
+ *
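+ * As a minimal sketch (the sizes are illustrative only), the dataspace below is currently 7 x 8 but
+ * may later be extended without limit in its first dimension and up to 64 elements in its second
+ * dimension. Note that a dataset with extendible or unlimited dimensions must use chunked storage.
+ * \code
+ * hsize_t dims[2]    = {7, 8};
+ * hsize_t maxdims[2] = {H5S_UNLIMITED, 64};
+ *
+ * // Create a 2-D dataspace whose first dimension can grow without limit.
+ * hid_t dataspace = H5Screate_simple(2, dims, maxdims);
+ * \endcode
+ *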

Datatype

+ * Raw data has a datatype which describes the layout of the raw data stored in the file. The + * datatype is set when the dataset is created and can never be changed. When data is transferred to + * and from the dataset, the HDF5 library will assure that the data is transformed to and from the + * stored format. + * + *

Storage Properties

+ * Storage properties of the dataset are set when it is created. The required inputs table below shows + * the categories of storage properties. The storage properties cannot be changed after the dataset is + * created. + * + *

Filters

+ * When a dataset is created, optional filters are specified. The filters are added to the data transfer + * pipeline when data is read or written. The standard library includes filters to implement + * compression, data shuffling, and error detection code. Additional user-defined filters may also be + * used. + * + * The required filters are stored as part of the dataset, and the list may not be changed after the + * dataset is created. The HDF5 library automatically applies the filters whenever data is + * transferred. + * + *
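+ * As an illustrative sketch only, the dataset creation property list below requests the shuffle and
+ * gzip (deflate) filters for a chunked dataset; the chunk size and compression level are arbitrary
+ * example values, and the identifiers file, datatype, and dataspace are assumed to already exist.
+ * Filters can be used only with chunked datasets.
+ * \code
+ * hsize_t chunk_dims[2] = {64, 64};
+ * hid_t   dcpl          = H5Pcreate(H5P_DATASET_CREATE);
+ * hid_t   dataset;
+ * herr_t  status;
+ *
+ * status  = H5Pset_chunk(dcpl, 2, chunk_dims);
+ * status  = H5Pset_shuffle(dcpl);    // shuffle is applied first
+ * status  = H5Pset_deflate(dcpl, 6); // then gzip compression at level 6
+ * dataset = H5Dcreate(file, "/dset_filtered", datatype, dataspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ * H5Pclose(dcpl);
+ * \endcode
+ *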

Summary

+ * + * A newly created dataset has no attributes and no data values. The dimensions, datatype, storage + * properties, and selected filters are set. The table below lists the required inputs, and the second + * table below lists the optional inputs. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Required inputs
Required InputsDescription
DataspaceThe shape of the array.
DatatypeThe layout of the stored elements.
NameThe name of the dataset in the group.
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Optional inputs
Optional InputsDescription
Storage LayoutHow the data is organized in the file including chunking.
Fill ValueThe behavior and value for uninitialized data.
External StorageOption to store the raw data in an external file.
FiltersSelect optional filters to be applied. One of the filters that might be applied is compression.
+ * + *

Example

+ * To create a new dataset, go through the following general steps: + * \li Set dataset characteristics (optional where default settings are acceptable) + * \li Datatype + * \li Dataspace + * \li Dataset creation property list + * \li Create the dataset + * \li Close the datatype, dataspace, and property list (as necessary) + * \li Close the dataset + * + * Example 1 below shows example code to create an empty dataset. The dataspace is 7 x 8, and the + * datatype is a big-endian integer. The dataset is created with the name “dset1” and is a member of + * the root group, “/”. + * + * Example 1. Create an empty dataset + * \code + * hid_t dataset, datatype, dataspace; + * + * // Create dataspace: Describe the size of the array and create the dataspace for fixed-size dataset. + * dimsf[0] = 7; + * dimsf[1] = 8; + * dataspace = H5Screate_simple(2, dimsf, NULL); + * + * // Define datatype for the data in the file. + * // For this example, store little-endian integer numbers. + * datatype = H5Tcopy(H5T_NATIVE_INT); + * status = H5Tset_order(datatype, H5T_ORDER_LE); + * + * // Create a new dataset within the file using defined + * // dataspace and datatype. No properties are set. + * dataset = H5Dcreate(file, "/dset", datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + * H5Dclose(dataset); + * H5Sclose(dataspace); + * H5Tclose(datatype); + * \endcode + * + * Example 2, below, shows example code to create a similar dataset with a fill value of ‘-1’. This + * code has the same steps as in the example above, but uses a non-default property list. A file + * creation property list is created, and then the fill value is set to the desired value. Then the + * property list is passed to the #H5Dcreate call. + * + * Example 2. Create a dataset with fill value set + * \code + * hid_t plist; // property list + * hid_t dataset, datatype, dataspace; + * int fillval = -1; + * + * dimsf[0] = 7; + * dimsf[1] = 8; + * dataspace = H5Screate_simple(2, dimsf, NULL); + * datatype = H5Tcopy(H5T_NATIVE_INT); + * status = H5Tset_order(datatype, H5T_ORDER_LE); + * + * // Example of Dataset Creation property list: set fill value to '-1' + * plist = H5Pcreate(H5P_DATASET_CREATE); + * status = H5Pset_fill_value(plist, datatype, &fillval); + * + * // Same as above, but use the property list + * dataset = H5Dcreate(file, "/dset", datatype, dataspace, H5P_DEFAULT, plist, H5P_DEFAULT); + * H5Dclose(dataset); + * H5Sclose(dataspace); + * H5Tclose(datatype); + * H5Pclose(plist); + * \endcode + * + * After this code is executed, the dataset has been created and written to the file. The data array is + * uninitialized. Depending on the storage strategy and fill value options that have been selected, + * some or all of the space may be allocated in the file, and fill values may be written in the file. + * + * \subsubsection subsubsec_dataset_program_transfer Data Transfer Operations on a Dataset + * Data is transferred between memory and the raw data array of the dataset through #H5Dwrite and + * #H5Dread operations. A data transfer has the following basic steps: + * \li 1. Allocate and initialize memory space as needed + * \li 2. Define the datatype of the memory elements + * \li 3. Define the elements to be transferred (a selection, or all the elements) + * \li 4. Set data transfer properties (including parameters for filters or file drivers) as needed + * \li 5. 
Call the @ref H5D API + * + * Note that the location of the data in the file, the datatype of the data in the file, the storage + * properties, and the filters do not need to be specified because these are stored as a permanent part + * of the dataset. A selection of elements from the dataspace is specified; the selected elements may + * be the whole dataspace. + * + * The following figure shows a diagram of a write operation which + * transfers a data array from memory to a dataset in the file (usually on disk). A read operation has + * similar parameters with the data flowing the other direction. + * + * + * + * + * + *
+ * \image html Dsets_fig3.gif "A write operation" + *
+ * + *

Memory Space

+ * The calling program must allocate sufficient memory to store the data elements to be transferred. + * For a write (from memory to the file), the memory must be initialized with the data to be written + * to the file. For a read, the memory must be large enough to store the elements that will be read. + * The amount of storage needed can be computed from the memory datatype (which defines the + * size of each data element) and the number of elements in the selection. + * + *

Memory Datatype

+ * The memory layout of a single data element is specified by the memory datatype. This specifies + * the size, alignment, and byte order of the element as well as the datatype class. Note that the + * memory datatype must be the same datatype class as the file, but may have different byte order + * and other properties. The HDF5 Library automatically transforms data elements between the + * source and destination layouts. For more information, \ref sec_datatype. + * + * For a write, the memory datatype defines the layout of the data to be written; an example is IEEE + * floating-point numbers in native byte order. If the file datatype (defined when the dataset is + * created) is different but compatible, the HDF5 Library will transform each data element when it + * is written. For example, if the file byte order is different than the native byte order, the HDF5 + * library will swap the bytes. + * + * For a read, the memory datatype defines the desired layout of the data to be read. This must be + * compatible with the file datatype, but should generally use native formats such as byte orders. + * The HDF5 library will transform each data element as it is read. + * + *

Selection

+ * The data transfer will transfer some or all of the elements of the dataset depending on the + * dataspace selection. The selection has two dataspace objects: one for the source, and one for the + * destination. These objects describe which elements of the dataspace to be transferred. Some + * (partial I/O) or all of the data may be transferred. Partial I/O is defined by defining hyperslabs or + * lists of elements in a dataspace object. + * + * The dataspace selection for the source defines the indices of the elements to be read or written. + * The two selections must define the same number of points, but the order and layout may be + * different. The HDF5 Library automatically selects and distributes the elements according to the + * selections. It might, for example, perform a scatter-gather or sub-set of the data. + * + *
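+ * The sketch below is a minimal illustration of a partial write using a hyperslab selection; the
+ * sizes and offsets are hypothetical, dataset_id is assumed to be an open dataset of native integers,
+ * and error checking is omitted.
+ * \code
+ * hsize_t offset[2]   = {3, 2}; // where the block starts in the file dataspace
+ * hsize_t count[2]    = {2, 4}; // size of the block to transfer
+ * int     block[2][4] = {{1, 2, 3, 4}, {5, 6, 7, 8}};
+ * herr_t  status;
+ *
+ * // Select the destination elements in the file dataspace.
+ * hid_t filespace = H5Dget_space(dataset_id);
+ * status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);
+ *
+ * // Describe the source elements: a 2 x 4 array in memory.
+ * hid_t memspace = H5Screate_simple(2, count, NULL);
+ *
+ * status = H5Dwrite(dataset_id, H5T_NATIVE_INT, memspace, filespace, H5P_DEFAULT, block);
+ * H5Sclose(memspace);
+ * H5Sclose(filespace);
+ * \endcode
+ *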

Data Transfer Properties

+ * For some data transfers, additional parameters should be set using the transfer property list. The + * table below lists the categories of transfer properties. These properties set parameters for the + * HDF5 Library and may be used to pass parameters for optional filters and file drivers. For + * example, transfer properties are used to select independent or collective operation when using + * MPI-I/O. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Categories of transfer properties
PropertiesDescription
Library parametersInternal caches, buffers, B-Trees, etc.
Memory managementVariable-length memory management, data overwrite
File driver managementParameters for file drivers
Filter managementParameters for filters
+ * + *

Data Transfer Operation (Read or Write)

+ * The data transfer is done by calling #H5Dread or #H5Dwrite with the parameters described above. + * The HDF5 Library constructs the required pipeline, which will scatter-gather, transform + * datatypes, apply the requested filters, and use the correct file driver. + * + * During the data transfer, the transformations and filters are applied to each element of the data in + * the required order until all the data is transferred. + * + *

Summary

+ * To perform a data transfer, it is necessary to allocate and initialize memory, describe the source + * and destination, set required and optional transfer properties, and call the \ref H5D API. + * + *

Examples

+ * The basic procedure to write to a dataset is the following: + * \li Open the dataset. + * \li Set the dataset dataspace for the write (optional if dataspace is #H5S_ALL). + * \li Write data. + * \li Close the datatype, dataspace, and property list (as necessary). + * \li Close the dataset. + * + * Example 3 below shows example code to write a 4 x 6 array of integers. In the example, the data + * is initialized in the memory array dset_data. The dataset has already been created in the file, so it + * is opened with H5Dopen. + * + * The data is written with #H5Dwrite. The arguments are the dataset identifier, the memory + * datatype (#H5T_NATIVE_INT), the memory and file selections (#H5S_ALL in this case: the + * whole array), and the default (empty) property list. The last argument is the data to be + * transferred. + * + * Example 3. Write an array of integers + * \code + * hid_t file_id, dataset_id; // identifiers + * herr_t status; + * int i, j, dset_data[4][6]; + * + * // Initialize the dataset. + * for (i = 0; i < 4; i++) + * for (j = 0; j < 6; j++) + * dset_data[i][j] = i * 6 + j + 1; + * + * // Open an existing file. + * file_id = H5Fopen("dset.h5", H5F_ACC_RDWR, H5P_DEFAULT); + * + * // Open an existing dataset. + * dataset_id = H5Dopen(file_id, "/dset", H5P_DEFAULT); + * + * // Write the entire dataset, using 'dset_data': memory type is 'native int' + * // write the entire dataspace to the entire dataspace, no transfer properties + * status = H5Dwrite(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset_data); + * + * status = H5Dclose(dataset_id); + * \endcode + * + * Example 4 below shows a similar write except for setting a non-default value for the transfer + * buffer. The code is the same as Example 3, but a transfer property list is created, and the desired + * buffer size is set. The #H5Dwrite function has the same arguments, but uses the property list to set + * the buffer. + * + * Example 4. Write an array using a property list + * \code + * hid_t file_id, dataset_id; + * hid_t xferplist; + * herr_t status; + * int i, j, dset_data[4][6]; + * + * file_id = H5Fopen("dset.h5", H5F_ACC_RDWR, H5P_DEFAULT); + * dataset_id = H5Dopen(file_id, "/dset", H5P_DEFAULT); + * + * // Example: set type conversion buffer to 64MB + * xferplist = H5Pcreate(H5P_DATASET_XFER); + * status = H5Pset_buffer( xferplist, 64 * 1024 *1024, NULL, NULL); + * + * // Write the entire dataset, using 'dset_data': memory type is 'native int' + * write the entire dataspace to the entire dataspace, set the buffer size with the property list + * status = H5Dwrite(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xferplist, dset_data); + * + * status = H5Dclose(dataset_id); + * \endcode + * + * The basic procedure to read from a dataset is the following: + * \li Define the memory dataspace of the read (optional if dataspace is #H5S_ALL). + * \li Open the dataset. + * \li Get the dataset dataspace (if using #H5S_ALL above). + * + * Else define dataset dataspace of read. + * \li Define the memory datatype (optional). + * \li Define the memory buffer. + * \li Open the dataset. + * \li Read data. + * \li Close the datatype, dataspace, and property list (as necessary). + * \li Close the dataset. + * + * The example below shows code that reads a 4 x 6 array of integers from a dataset called “dset1”. + * First, the dataset is opened. 
The #H5Dread call has parameters: + * \li The dataset identifier (from #H5Dopen) + * \li The memory datatype (#H5T_NATIVE_INT) + * \li The memory and file dataspace (#H5S_ALL, the whole array) + * \li A default (empty) property list + * \li The memory to be filled + * + * Example 5. Read an array from a dataset + * \code + * hid_t file_id, dataset_id; + * herr_t status; + * int i, j, dset_data[4][6]; + * + * // Open an existing file. + * file_id = H5Fopen("dset.h5", H5F_ACC_RDWR, H5P_DEFAULT); + * + * // Open an existing dataset. + * dataset_id = H5Dopen(file_id, "/dset", H5P_DEFAULT); + * + * // read the entire dataset, into 'dset_data': memory type is 'native int' + * // read the entire dataspace to the entire dataspace, no transfer properties, + * status = H5Dread(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset_data); + * + * status = H5Dclose(dataset_id); + * \endcode + * + * \subsubsection subsubsec_dataset_program_read Retrieve the Properties of a Dataset + * The functions listed below allow the user to retrieve information regarding a dataset including + * the datatype, the dataspace, the dataset creation property list, and the total stored size of the data. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Retrieve dataset information
Query FunctionDescription
H5Dget_spaceRetrieve the dataspace of the dataset as stored in the file.
H5Dget_typeRetrieve the datatype of the dataset as stored in the file.
H5Dget_create_plistRetrieve the dataset creation properties.
H5Dget_storage_sizeRetrieve the total bytes for all the data of the dataset.
H5Dvlen_get_buf_sizeRetrieve the total bytes for all the variable-length data of the dataset.
+ * + * The example below illustrates how to retrieve dataset information. + * + * Example 6. Retrieve dataset + * \code + * hid_t file_id, dataset_id; + * hid_t dspace_id, dtype_id, plist_id; + * herr_t status; + * + * // Open an existing file. + * file_id = H5Fopen("dset.h5", H5F_ACC_RDWR, H5P_DEFAULT); + * + * // Open an existing dataset. + * dataset_id = H5Dopen(file_id, "/dset", H5P_DEFAULT); + * dspace_id = H5Dget_space(dataset_id); + * dtype_id = H5Dget_type(dataset_id); + * plist_id = H5Dget_create_plist(dataset_id); + * + * // use the objects to discover the properties of the dataset + * status = H5Dclose(dataset_id); + * \endcode + * + * \subsection subsec_dataset_transfer Data Transfer + * The HDF5 library implements data transfers through a pipeline which implements data + * transformations (according to the datatype and selections), chunking (as requested), and I/O + * operations using different mechanisms (file drivers). The pipeline is automatically configured by + * the HDF5 library. Metadata is stored in the file so that the correct pipeline can be constructed to + * retrieve the data. In addition, optional filters such as compression may be added to the standard + * pipeline. + * + * The figure below illustrates data layouts for different layers of an application using HDF5. The + * application data is organized as a multidimensional array of elements. The HDF5 format + * specification defines the stored layout of the data and metadata. The storage layout properties + * define the organization of the abstract data. This data is written to and read from some storage + * medium. + * + * + * + * + * + *
+ * \image html Dsets_fig4.gif "Data layouts in an application" + *
+ * + * The last stage of a write (and first stage of a read) is managed by an HDF5 file driver module. + * The virtual file layer of the HDF5 Library implements a standard interface to alternative I/O + * methods, including memory (AKA “core”) files, single serial file I/O, multiple file I/O, and + * parallel I/O. The file driver maps a simple abstract HDF5 file to the specific access methods. + * + * The raw data of an HDF5 dataset is conceived to be a multidimensional array of data elements. + * This array may be stored in the file according to several storage strategies: + * \li Contiguous + * \li Chunked + * \li Compact + * + * The storage strategy does not affect data access methods except that certain operations may be + * more or less efficient depending on the storage strategy and the access patterns. + * + * Overall, the data transfer operations (#H5Dread and #H5Dwrite) work identically for any storage + * method, for any file driver, and for any filters and transformations. The HDF5 library + * automatically manages the data transfer process. In some cases, transfer properties should or + * must be used to pass additional parameters such as MPI/IO directives when using the parallel file + * driver. + * + * \subsubsection subsubsec_dataset_transfer_pipe The Data Pipeline + * When data is written or read to or from an HDF5 file, the HDF5 library passes the data through a + * sequence of processing steps which are known as the HDF5 data pipeline. This data pipeline + * performs operations on the data in memory such as byte swapping, alignment, scatter-gather, and + * hyperslab selections. The HDF5 library automatically determines which operations are needed + * and manages the organization of memory operations such as extracting selected elements from a + * data block. The data pipeline modules operate on data buffers: each module processes a buffer + * and passes the transformed buffer to the next stage. + * + * The table below lists the stages of the data pipeline. The figure below the table shows the order + * of processing during a read or write. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Stages of the data pipeline
LayersDescription
I/O initiationInitiation of HDF5 I/O activities (#H5Dwrite and #H5Dread) in a user’s application program.
Memory hyperslab operationData is scattered to (for read), or gathered from (for write) the application’s memory buffer + * (bypassed if no datatype conversion is needed).
Datatype conversionDatatype is converted if it is different between memory and storage (bypassed if no datatype + * conversion is needed).
File hyperslab operationData is gathered from (for read), or scattered to (for write) to file space in memory (bypassed + * if no datatype conversion is needed).
Filter pipelineData is processed by filters when it passes. Data can be modified and restored here (bypassed + * if no datatype conversion is needed, no filter is enabled, or dataset is not chunked).
Virtual File LayerFacilitates plug-in file drivers such as MPIO or POSIX I/O.
Actual I/OActual file driver used by the library such as MPIO or STDIO.
+ * + * + * + * + * + *
+ * \image html Dsets_fig5.gif "The processing order in the data pipeline" + *
+ * + * The HDF5 library automatically applies the stages as needed. + * + * When the memory dataspace selection is other than the whole dataspace, the memory hyperslab + * stage scatters/gathers the data elements between the application memory (described by the + * selection) and a contiguous memory buffer for the pipeline. On a write, this is a gather operation; + * on a read, this is a scatter operation. + * + * When the memory datatype is different from the file datatype, the datatype conversion stage + * transforms each data element. For example, if data is written from 32-bit big-endian memory, + * and the file datatype is 32-bit little-endian, the datatype conversion stage will swap the bytes of + * every element. Similarly, when data is read from the file to native memory, byte swapping will + * be applied automatically when needed. + * + * The file hyperslab stage is similar to the memory hyperslab stage, but is managing the + * arrangement of the elements according to the dataspace selection. When data is read, data + * elements are gathered from the data blocks from the file to fill the contiguous buffers which are + * then processed by the pipeline. When data is read, the elements from a buffer are scattered to the + * data blocks of the file. + * + * \subsubsection subsubsec_dataset_transfer_filter Data Pipeline Filters + * In addition to the standard pipeline, optional stages, called filters, can be inserted in the pipeline. + * The standard distribution includes optional filters to implement compression and error checking. + * User applications may add custom filters as well. + * + * The HDF5 library distribution includes or employs several optional filters. These are listed in the + * table below. The filters are applied in the pipeline between the virtual file layer and the file + * hyperslab operation. See the figure above. The application can use any number of filters in any + * order. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Data pipeline filters
FilterDescription
gzip compressionData compression using zlib.
Szip compressionData compression using the Szip library. See The HDF Group website for more information + * regarding the Szip filter.
N-bit compressionData compression using an algorithm specialized for n-bit datatypes.
Scale-offset compressionData compression using a “scale and offset” algorithm.
ShufflingTo improve compression performance, data is regrouped by its byte position in the data + * unit. In other words, the 1st, 2nd, 3rd, and 4th bytes of integers are stored together + * respectively.
Fletcher32Fletcher32 checksum for error-detection.
+ * + * Filters may be used only for chunked data and are applied to chunks of data between the file + * hyperslab stage and the virtual file layer. At this stage in the pipeline, the data is organized as + * fixed-size blocks of elements, and the filter stage processes each chunk separately. + * + * Filters are selected by dataset creation properties, and some behavior may be controlled by data + * transfer properties. The library determines what filters must be applied and applies them in the + * order in which they were set by the application. That is, if an application calls + * #H5Pset_shuffle and then #H5Pset_deflate when creating a dataset’s creation property list, the + * library will apply the shuffle filter first and then the deflate filter. + * + * For more information, + * \li @see @ref subsubsec_dataset_filters_nbit + * \li @see @ref subsubsec_dataset_filters_scale + * + * \subsubsection subsubsec_dataset_transfer_drive File Drivers + * I/O is performed by the HDF5 virtual file layer. The file driver interface writes and reads blocks + * of data; each driver module implements the interface using different I/O mechanisms. The table + * below lists the file drivers currently supported. Note that the I/O mechanisms are separated from + * the pipeline processing: the pipeline and filter operations are identical no matter what data access + * mechanism is used. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
I/O file drivers
File DriverDescription
#H5FD_COREStore in memory (optional backing store to disk file).
#H5FD_FAMILYStore in a set of files.
#H5FD_LOGStore in logging file.
#H5FD_MPIOStore using MPI/IO.
#H5FD_MULTIStore in multiple files. There are several options to control layout.
#H5FD_SEC2Serial I/O to file using Unix “section 2” functions.
#H5FD_STDIOSerial I/O to file using Unix “stdio” functions.
+ * + * Each file driver writes/reads contiguous blocks of bytes from a logically contiguous address + * space. The file driver is responsible for managing the details of the different physical storage + * methods. + * + * In serial environments, everything above the virtual file layer tends to work identically no matter + * what storage method is used. + * + * Some options may have substantially different performance depending on the file driver that is + * used. In particular, multi-file and parallel I/O may perform considerably differently from serial + * drivers depending on chunking and other settings. + * + * \subsubsection subsubsec_dataset_transfer_props Data Transfer Properties to Manage the Pipeline + * Data transfer properties set optional parameters that control parts of the data pipeline. The + * function listing below shows transfer properties that control the behavior of the library. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Data transfer property list functions
C FunctionPurpose
#H5Pset_bufferMaximum size for the type conversion buffer and the background buffer. May also supply + * pointers to application-allocated buffers.
#H5Pset_hyper_vector_sizeSets the number of "I/O vectors" (offset and length pairs) which are to be + * accumulated in memory before being issued to the lower levels + * of the library for reading or writing the actual data.
#H5Pset_btree_ratiosSet the B-tree split ratios for a dataset transfer property list. The split ratios determine + * what percent of children go in the first node when a node splits.
+ * + * Some filters and file drivers require or use additional parameters from the application program. + * These can be passed in the data transfer property list. The table below shows file driver property + * list functions. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
File driver property list functions
C FunctionPurpose
#H5Pset_dxpl_mpioControl the MPI I/O transfer mode (independent or collective) during data I/O operations.
#H5Pset_small_data_block_sizeReserves blocks of size bytes for the contiguous storage of the raw data portion of small + * datasets. The HDF5 Library then writes the raw data from small datasets to this reserved space + * which reduces unnecessary discontinuities within blocks of metadata and improves + * I/O performance.
#H5Pset_edc_checkDisable/enable EDC checking for read. When selected, EDC is always written.
+ * + * The transfer properties are set in a property list which is passed as a parameter of the #H5Dread or + * #H5Dwrite call. The transfer properties are passed to each pipeline stage. Each stage may use or + * ignore any property in the list. In short, there is one property list that contains all the properties. + * + * \subsubsection subsubsec_dataset_transfer_store Storage Strategies + * The raw data is conceptually a multi-dimensional array of elements that is stored as a contiguous + * array of bytes. The data may be physically stored in the file in several ways. The table below lists + * the storage strategies for a dataset. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Dataset storage strategies
Storage StrategyDescription
ContiguousThe dataset is stored as one continuous array of bytes.
Chunked The dataset is stored as fixed-size chunks.
CompactA small dataset is stored in the metadata header.
+ * + * The different storage strategies do not affect the data transfer operations of the dataset: reads and + * writes work the same for any storage strategy. + * + * These strategies are described in the following sections. + * + *

Contiguous

+ * A contiguous dataset is stored in the file as a header and a single continuous array of bytes. See + * the figure below. In the case of a multi-dimensional array, the data is serialized in row major order. By + * default, data is stored contiguously. + * + * + * + * + * + *
+ * \image html Dsets_fig6.gif "Contiguous data storage" + *
+ * + * Contiguous storage is the simplest model. It has several limitations. First, the dataset must be a + * fixed-size: it is not possible to extend the limit of the dataset or to have unlimited dimensions. In + * other words, if the number of dimensions of the array might change over time, then chunking + * storage must be used instead of contiguous. Second, because data is passed through the pipeline + * as fixed-size blocks, compression and other filters cannot be used with contiguous data. + * + *

Chunked

+ * The data of a dataset may be stored as fixed-size chunks. A chunk is a hyper- + * rectangle of any shape. When a dataset is chunked, each chunk is read or written as a single I/O + * operation, and individually passed from stage to stage of the data pipeline. + * + * + * + * + * + *
+ * \image html Dsets_fig7.gif "Chunked data storage" + *
+ * + * Chunks may be any size and shape that fits in the dataspace of the dataset. For example, a three + * dimensional dataspace can be chunked as 3-D cubes, 2-D planes, or 1-D lines. The chunks may + * extend beyond the size of the dataspace. For example, a 3 x 3 dataset might by chunked in 2 x 2 + * chunks. Sufficient chunks will be allocated to store the array, and any extra space will not be + * accessible. So, to store the 3 x 3 array, four 2 x 2 chunks would be allocated with 5 unused + * elements stored. + * + * Chunked datasets can be unlimited in any direction and can be compressed or filtered. + * + * Since the data is read or written by chunks, chunking can have a dramatic effect on performance + * by optimizing what is read and written. Note, too, that for specific access patterns such as + * parallel I/O, decomposition into chunks can have a large impact on performance. + * + * Two restrictions have been placed on chunk shape and size: + *
+ * \li The rank of a chunk must be less than or equal to the rank of the dataset
+ * \li Chunk size cannot exceed the size of a fixed-size dataset; for example, a dataset consisting of
+ * a 5 x 4 fixed-size array cannot be defined with 10 x 10 chunks
+ * + *
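+ * As an illustrative sketch (sizes are arbitrary and file is assumed to be an open file identifier),
+ * the code below creates a small chunked dataset with one unlimited dimension, as described above.
+ * \code
+ * hsize_t dims[2]       = {3, 3};
+ * hsize_t maxdims[2]    = {H5S_UNLIMITED, 3};
+ * hsize_t chunk_dims[2] = {2, 2};
+ * herr_t  status;
+ *
+ * hid_t space = H5Screate_simple(2, dims, maxdims);
+ * hid_t dcpl  = H5Pcreate(H5P_DATASET_CREATE);
+ * status = H5Pset_chunk(dcpl, 2, chunk_dims);
+ * hid_t dataset = H5Dcreate(file, "/dset_chunked", H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ * H5Pclose(dcpl);
+ * H5Sclose(space);
+ * \endcode
+ *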

Compact

+ * For contiguous and chunked storage, the dataset header information and data are stored in two + * (or more) blocks. Therefore, at least two I/O operations are required to access the data: one to + * access the header, and one (or more) to access data. For a small dataset, this is considerable + * overhead. + * + * A small dataset may be stored in a continuous array of bytes in the header block using the + * compact storage option. This dataset can be read entirely in one operation which retrieves the + * header and data. The dataset must fit in the header. This may vary depending on the metadata + * that is stored. In general, a compact dataset should be approximately 30 KB or less total size. + * + * + * + * + * + *
+ * \image html Dsets_fig8.gif "Compact data storage" + *
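+ * As a minimal sketch (the name and sizes are illustrative, and file is assumed to be an open file
+ * identifier), the code below stores a very small dataset compactly in its object header.
+ * \code
+ * hsize_t dims[2] = {4, 4}; // 16 integers easily fit within the object header
+ * herr_t  status;
+ *
+ * hid_t space = H5Screate_simple(2, dims, NULL);
+ * hid_t dcpl  = H5Pcreate(H5P_DATASET_CREATE);
+ * status = H5Pset_layout(dcpl, H5D_COMPACT);
+ * hid_t dataset = H5Dcreate(file, "/dset_compact", H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ * H5Pclose(dcpl);
+ * H5Sclose(space);
+ * \endcode
+ *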
+ * + * \subsubsection subsubsec_dataset_transfer_partial Partial I/O Sub‐setting and Hyperslabs + * Data transfers can write or read some of the data elements of the dataset. This is controlled by + * specifying two selections: one for the source and one for the destination. Selections are specified + * by creating a dataspace with selections. + * + * Selections may be a union of hyperslabs or a list of points. A hyperslab is a contiguous hyper- + * rectangle from the dataspace. Selected fields of a compound datatype may be read or written. In + * this case, the selection is controlled by the memory and file datatypes. + * + * Summary of procedure: + * \li 1. Open the dataset + * \li 2. Define the memory datatype + * \li 3. Define the memory dataspace selection and file dataspace selection + * \li 4. Transfer data (#H5Dread or #H5Dwrite) + * + * For more information, + * @see @ref sec_dataspace + * + * \subsection subsec_dataset_allocation Allocation of Space in the File + * When a dataset is created, space is allocated in the file for its header and initial data. The amount +of space allocated when the dataset is created depends on the storage properties. When the +dataset is modified (data is written, attributes added, or other changes), additional storage may be +allocated if necessary. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Initial dataset size
ObjectSize
HeaderVariable, but typically around 256 bytes at the creation of a simple dataset with a simple + * datatype.
DataSize of the data array (number of elements x size of element). Space allocated in + * the file depends on the storage strategy and the allocation strategy.
+ * + *

Header

+ * A dataset header consists of one or more header messages containing persistent metadata + * describing various aspects of the dataset. These records are defined in the HDF5 File Format + * Specification. The amount of storage required for the metadata depends on the metadata to be + * stored. The table below summarizes the metadata. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Metadata storage sizes
Header InformationApproximate Storage Size
Datatype (required)Bytes or more. Depends on type.
Dataspace (required)Bytes or more. Depends on number of dimensions and hsize_t.
Layout (required)Points to the stored data. Bytes or more. Depends on hsize_t and number of dimensions.
FiltersDepends on the number of filters. The size of the filter message depends on the name and + * data that will be passed.
+ * + * The header blocks also store the name and values of attributes, so the total storage depends on + * the number and size of the attributes. + * + * In addition, the dataset must have at least one link, including a name, which is stored in the file + * and in the group it is linked from. + * + * The different storage strategies determine when and how much space is allocated for the data + * array. See the discussion of fill values below for a detailed explanation of the storage allocation. + * + *

Contiguous Storage

+ * For the contiguous storage option, the data is stored in a single, contiguous block in the file. The
+ * data is nominally a fixed size (number of elements x size of element). The figure below shows
+ * an example of a two dimensional array stored as a contiguous dataset.
+ *
+ * Depending on the fill value properties, the space may be allocated when the dataset is created or
+ * when first written (default), and filled with fill values if specified. For parallel I/O, by default the
+ * space is allocated when the dataset is created.
+ *
+ *
+ *
+ *
+ *
+ * \image html Dsets_fig9.gif "A two dimensional array stored as a contiguous dataset" + *
+ * + *

Chunked Storage

+ * For chunked storage, the data is stored in one or more chunks. Each chunk is a continuous block + * in the file, but chunks are not necessarily stored contiguously. Each chunk has the same size. The + * data array has the same nominal size as a contiguous array (number of elements x size of + * element), but the storage is allocated in chunks, so the total size in the file can be larger than the + * nominal size of the array. See the figure below. + * + * If a fill value is defined, each chunk will be filled with the fill value. Chunks must be allocated + * when data is written, but they may be allocated when the file is created, as the file expands, or + * when data is written. + * + * For serial I/O, by default chunks are allocated incrementally, as data is written to the chunk. For + * a sparse dataset, chunks are allocated only for the parts of the dataset that are written. In this + * case, if the dataset is extended, no storage is allocated. + * + * For parallel I/O, by default chunks are allocated when the dataset is created or extended with fill + * values written to the chunk. + * + * In either case, the default can be changed using fill value properties. For example, using serial + * I/O, the properties can select to allocate chunks when the dataset is created. + * + * + * + * + * + *
+ * \image html Dsets_fig10.gif "A two dimensional array stored in chunks" + *
+ * + *

Changing Dataset Dimensions

+ * #H5Dset_extent is used to change the current dimensions of the dataset within the limits of the
+ * dataspace. Each dimension can be extended up to its maximum size or, if the maximum is unlimited,
+ * indefinitely. Extending the dataspace may or may not allocate space in the file and may or may not
+ * write fill values, if they are defined. See the example code below.
+ *
+ * The dimensions of the dataset can also be reduced. If the sizes specified are smaller than the
+ * dataset’s current dimension sizes, #H5Dset_extent will reduce the dataset’s dimension sizes to the
+ * specified values. It is the user’s responsibility to ensure that valuable data is not lost;
+ * #H5Dset_extent does not check.
+ *
+ * Using #H5Dset_extent to increase the size of a dataset
+ * \code
+ * hid_t file_id, dataset_id;
+ * herr_t status;
+ * hsize_t newdims[2];
+ *
+ * // Open an existing file.
+ * file_id = H5Fopen("dset.h5", H5F_ACC_RDWR, H5P_DEFAULT);
+ *
+ * // Open an existing dataset.
+ * dataset_id = H5Dopen(file_id, "/dset", H5P_DEFAULT);
+ *
+ * // Example: dataset is 2 x 3, each dimension is UNLIMITED
+ * // extend to 2 x 7
+ * newdims[0] = 2;
+ * newdims[1] = 7;
+ * status = H5Dset_extent(dataset_id, newdims);
+ *
+ * // dataset is now 2 x 7
+ *
+ * status = H5Dclose(dataset_id);
+ * \endcode
+ *
+ * \subsubsection subsubsec_dataset_allocation_store Storage Allocation in the File: Early, Incremental, Late
+ * The HDF5 Library implements several strategies for when storage is allocated and for whether and
+ * when it is filled with fill values for elements not yet written by the user. Different strategies are
+ * recommended for different storage layouts and file drivers. In particular, a parallel program
+ * needs storage allocated during a collective call (for example, create or extend), while serial
+ * programs may benefit from delaying the allocation until the data is written.
+ *
+ * Three dataset creation properties control when to allocate space, when to write the fill value, and the
+ * actual fill value to write.
+ *

When to Allocate Space

+ * The table below shows the options for when data is allocated in the file. Early allocation is done + * during the dataset create call. Certain file drivers (especially MPI-I/O and MPI-POSIX) require + * space to be allocated when a dataset is created, so all processors will have the correct view of the + * data. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
File storage allocation options
<tr><th>Strategy</th><th>Description</th></tr>
<tr><td>Early</td><td>Allocate storage for the dataset immediately when the dataset is created.</td></tr>
<tr><td>Late</td><td>Defer allocating space for storing the dataset until the dataset is written.</td></tr>
<tr><td>Incremental</td><td>Defer allocating space for storing each chunk until the chunk is written.</td></tr>
<tr><td>Default</td><td>Use the strategy (Early, Late, or Incremental) recommended for the storage method and
access method. This is the recommended strategy.</td></tr>
+ * + * Late allocation is done at the time of the first write to dataset. Space for the whole dataset is + * allocated at the first write. + * + * Incremental allocation (chunks only) is done at the time of the first write to the chunk. Chunks + * that have never been written are not allocated in the file. In a sparsely populated dataset, this + * option allocates chunks only where data is actually written. + * + * The “Default” property selects the option recommended as appropriate for the storage method + * and access method. The defaults are shown in the table below. Note that Early allocation is + * recommended for all Parallel I/O, while other options are recommended as the default for serial + * I/O cases. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Default storage options
<tr><th>Storage Type</th><th>Serial I/O</th><th>Parallel I/O</th></tr>
<tr><td>Contiguous</td><td>Late</td><td>Early</td></tr>
<tr><td>Chunked</td><td>Incremental</td><td>Early</td></tr>
<tr><td>Compact</td><td>Early</td><td>Early</td></tr>
+ * + *
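+ *
+ * As a minimal sketch of overriding the default (the choice of early allocation here only
+ * illustrates the call, it is not a general recommendation), the allocation time is set on the
+ * dataset creation property list:
+ * \code
+ * hid_t dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
+ *
+ * // Allocate space for the whole dataset when the dataset is created.
+ * H5Pset_alloc_time(dcpl_id, H5D_ALLOC_TIME_EARLY);
+ *
+ * // Other options are H5D_ALLOC_TIME_LATE, H5D_ALLOC_TIME_INCR,
+ * // and H5D_ALLOC_TIME_DEFAULT (let the library choose for the layout).
+ * \endcode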

When to Write the Fill Value

+ * The second property is when to write the fill value. The possible values are “Never” and + * “Allocation”. The table below shows these options. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
When to write fill values
<tr><th>When</th><th>Description</th></tr>
<tr><td>Never</td><td>Fill value will never be written.</td></tr>
<tr><td>Allocation</td><td>Fill value is written when space is allocated. (Default for chunked and contiguous
data storage.)</td></tr>
+ * + *
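+ *
+ * A minimal sketch of setting this property is shown below, together with a fill value, which is
+ * the third property and is described in the next section; the value -1 is an illustrative
+ * assumption:
+ * \code
+ * hid_t dcpl_id  = H5Pcreate(H5P_DATASET_CREATE);
+ * int   fill_val = -1;   // illustrative user-defined fill value
+ *
+ * // Write the fill value when space is allocated (the default for chunked
+ * // and contiguous storage); H5D_FILL_TIME_NEVER suppresses it.
+ * H5Pset_fill_time(dcpl_id, H5D_FILL_TIME_ALLOC);
+ *
+ * // Define the fill value itself.
+ * H5Pset_fill_value(dcpl_id, H5T_NATIVE_INT, &fill_val);
+ * \endcode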

What Fill Value to Write

+ * The third property is the fill value to write. The table below shows the values. By default, the + * data is filled with zeros. The application may choose no fill value (Undefined). In this case, + * uninitialized data may have random values. The application may define a fill value of an + * appropriate type. For more information, @see @ref subsec_datatype_fill. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Fill values to write
<tr><th>What to Write</th><th>Description</th></tr>
<tr><td>Default</td><td>By default, the library fills allocated space with zeros.</td></tr>
<tr><td>Undefined</td><td>Allocated space is filled with random values.</td></tr>
<tr><td>User-defined</td><td>The application specifies the fill value.</td></tr>
+ * + * Together these three properties control the library’s behavior. The table below summarizes the + * possibilities during the dataset create-write-close cycle. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Storage allocation and fill summary
<tr><th>When to allocate space</th><th>When to write fill value</th><th>What fill value to write</th><th>Library create-write-close behavior</th></tr>
<tr><td>Early</td><td>Never</td><td>-</td><td>Library allocates space when the dataset is created, but never writes a fill value to the dataset.
A read of unwritten data returns undefined values.</td></tr>
<tr><td>Late</td><td>Never</td><td>-</td><td>Library allocates space when the dataset is written to, but never writes a fill value to the dataset.
A read of unwritten data returns undefined values.</td></tr>
<tr><td>Incremental</td><td>Never</td><td>-</td><td>Library allocates space when a dataset or chunk (whichever is the smallest unit of space)
is written to, but it never writes a fill value to a dataset or a chunk. A read of unwritten data
returns undefined values.</td></tr>
<tr><td>-</td><td>Allocation</td><td>Undefined</td><td>Error on creating the dataset. The dataset is not created.</td></tr>
<tr><td>Early</td><td>Allocation</td><td>Default or User-defined</td><td>Allocate space for the dataset when the dataset is created. Write the fill value (default or
user-defined) to the entire dataset when the dataset is created.</td></tr>
<tr><td>Late</td><td>Allocation</td><td>Default or User-defined</td><td>Allocate space for the dataset when the application first writes data values to the dataset.
Write the fill value to the entire dataset before writing application data values.</td></tr>
<tr><td>Incremental</td><td>Allocation</td><td>Default or User-defined</td><td>Allocate space for the dataset when the application first writes data values to the dataset or
chunk (whichever is the smallest unit of space). Write the fill value to the entire dataset
or chunk before writing application data values.</td></tr>
+ * + * During the #H5Dread function call, the library behavior depends on whether space has been + * allocated, whether the fill value has been written to storage, how the fill value is defined, and + * when to write the fill value. The table below summarizes the different behaviors. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
H5Dread summary
<tr><th>Is space allocated in the file?</th><th>What is the fill value?</th><th>When to write the fill value?</th><th>Library read behavior</th></tr>
<tr><td>No</td><td>Undefined</td><td>anytime</td><td>Error. Cannot create this dataset.</td></tr>
<tr><td>No</td><td>Default or User-defined</td><td>anytime</td><td>Fill the memory buffer with the fill value.</td></tr>
<tr><td>Yes</td><td>Undefined</td><td>anytime</td><td>Return data from storage (dataset). Trash is possible if the application has not written data
to the portion of the dataset being read.</td></tr>
<tr><td>Yes</td><td>Default or User-defined</td><td>Never</td><td>Return data from storage (dataset). Trash is possible if the application has not written data
to the portion of the dataset being read.</td></tr>
<tr><td>Yes</td><td>Default or User-defined</td><td>Allocation</td><td>Return data from storage (dataset).</td></tr>
+ * + * There are two cases to consider depending on whether the space in the file has been allocated + * before the read or not. When space has not yet been allocated and if a fill value is defined, the + * memory buffer will be filled with the fill values and returned. In other words, no data has been + * read from the disk. If space has been allocated, the values are returned from the stored data. The + * unwritten elements will be filled according to the fill value. + * + * \subsubsection subsubsec_dataset_allocation_delete Deleting a Dataset from a File and Reclaiming Space + * HDF5 does not at this time provide an easy mechanism to remove a dataset from a file or to + * reclaim the storage space occupied by a deleted object. + * + * Removing a dataset and reclaiming the space it used can be done with the #H5Ldelete function + * and the h5repack utility program. With the H5Ldelete function, links to a dataset can be removed + * from the file structure. After all the links have been removed, the dataset becomes inaccessible to + * any application and is effectively removed from the file. The way to recover the space occupied + * by an unlinked dataset is to write all of the objects of the file into a new file. Any unlinked object + * is inaccessible to the application and will not be included in the new file. Writing objects to a + * new file can be done with a custom program or with the h5repack utility program. + * + * For more information, @see @ref sec_group + * + * \subsubsection subsubsec_dataset_allocation_release Releasing Memory Resources + * The system resources required for HDF5 objects such as datasets, datatypes, and dataspaces + * should be released once access to the object is no longer needed. This is accomplished via the + * appropriate close function. This is not unique to datasets but a general requirement when + * working with the HDF5 Library; failure to close objects will result in resource leaks. + * + * In the case where a dataset is created or data has been transferred, there are several objects that + * must be closed. These objects include datasets, datatypes, dataspaces, and property lists. + * + * The application program must free any memory variables and buffers it allocates. When + * accessing data from the file, the amount of memory required can be determined by calculating + * the size of the memory datatype and the number of elements in the memory selection. + * + * Variable-length data are organized in two or more areas of memory. For more information, + * \see \ref h4_vlen_datatype "Variable-length Datatypes". + * + * When writing data, the application creates an array of + * vl_info_t which contains pointers to the elements. The elements might be, for example, strings. + * In the file, the variable-length data is stored in two parts: a heap with the variable-length values + * of the data elements and an array of vl_info_t elements. When the data is read, the amount of + * memory required for the heap can be determined with the #H5Dvlen_get_buf_size call. + * + * The data transfer property may be used to set a custom memory manager for allocating variable- + * length data for a #H5Dread. This is set with the #H5Pset_vlen_mem_manager call. + * To free the memory for variable-length data, it is necessary to visit each element, free the + * variable-length data, and reset the element. The application must free the memory it has + * allocated. 
For memory allocated by the HDF5 Library during a read, the #H5Dvlen_reclaim + * function can be used to perform this operation. + * + * \subsubsection subsubsec_dataset_allocation_ext External Storage Properties + * The external storage format allows data to be stored across a set of non-HDF5 files. A set of + * segments (offsets and sizes) in one or more files is defined as an external file list, or EFL, and + * the contiguous logical addresses of the data storage are mapped onto these segments. Currently, + * only the #H5D_CONTIGUOUS storage format allows external storage. External storage is + * enabled by a dataset creation property. The table below shows the API. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
External storage API
<tr><th>Function</th><th>Description</th></tr>
<tr><td>#H5Pset_external</td><td>This function adds a new segment to the end of the external file list of the specified dataset
creation property list. The segment begins at the given byte offset in the named file and continues
for the given number of bytes. The space represented by this segment is adjacent to the space already
represented by the external file list. The last segment in a file list may have the size
#H5F_UNLIMITED, in which case the external file may be of unlimited size and no more files can be
added to the external files list.</td></tr>
<tr><td>#H5Pget_external_count</td><td>Calling this function returns the number of segments in an external file list. If the dataset
creation property list has no external data, then zero is returned.</td></tr>
<tr><td>#H5Pget_external</td><td>This is the counterpart of the #H5Pset_external function. Given a dataset creation
property list and a zero-based index into that list, the file name, byte offset, and segment
size are returned through non-null arguments. At most name_size characters are copied into
the name argument, which is not null terminated if the file name is longer than the
supplied name buffer (this is similar to strncpy()).</td></tr>
+ * + * The figure below shows an example of how a contiguous, one-dimensional dataset is partitioned + * into three parts and each of those parts is stored in a segment of an external file. The top + * rectangle represents the logical address space of the dataset while the bottom rectangle represents + * an external file. + * + * + * + * + * + *
+ * \image html Dsets_fig11.gif "External file storage" + *
+ * + * The example below shows code that defines the external storage for the example. Note that the + * segments are defined in order of the logical addresses they represent, not their order within the + * external file. It would also have been possible to put the segments in separate files. Care should + * be taken when setting up segments in a single file since the library does not automatically check + * for segments that overlap. + * + * External storage + * \code + * plist = H5Pcreate (H5P_DATASET_CREATE); + * H5Pset_external (plist, "velocity.data", 3000, 1000); + * H5Pset_external (plist, "velocity.data", 0, 2500); + * H5Pset_external (plist, "velocity.data", 4500, 1500); + * \endcode + * + * The figure below shows an example of how a contiguous, two-dimensional dataset is partitioned + * into three parts and each of those parts is stored in a separate external file. The top rectangle + * represents the logical address space of the dataset while the bottom rectangles represent external + * files. + * + * + * + * + * + *
+ * \image html Dsets_fig12.gif "Partitioning a 2-D dataset for external storage" + *
+ * + * The example below shows code for the partitioning described above. In this example, the library + * maps the multi-dimensional array onto a linear address space as defined by the HDF5 format + * specification, and then maps that address space into the segments defined in the external file list. + * + * Partitioning a 2-D dataset for external storage + * \code + * plist = H5Pcreate (H5P_DATASET_CREATE); + * H5Pset_external (plist, "scan1.data", 0, 24); + * H5Pset_external (plist, "scan2.data", 0, 24); + * H5Pset_external (plist, "scan3.data", 0, 16); + * \endcode + * + * The segments of an external file can exist beyond the end of the (external) file. The library reads + * that part of a segment as zeros. When writing to a segment that exists beyond the end of a file, + * the external file is automatically extended. Using this feature, one can create a segment (or set of + * segments) which is larger than the current size of the dataset. This allows the dataset to be + * extended at a future time (provided the dataspace also allows the extension). + * + * All referenced external data files must exist before performing raw data I/O on the dataset. This + * is normally not a problem since those files are being managed directly by the application or + * indirectly through some other library. However, if the file is transferred from its original context, + * care must be taken to assure that all the external files are accessible in the new location. + * + * \subsection subsec_dataset_filters Using HDF5 Filters + * This section describes in detail how to use the n-bit, scale-offset filters and szip filters. + * + * \subsubsection subsubsec_dataset_filters_nbit Using the N‐bit Filter + * N-bit data has n significant bits, where n may not correspond to a precise number of bytes. On + * the other hand, computing systems and applications universally, or nearly so, run most efficiently + * when manipulating data as whole bytes or multiple bytes. + * + * Consider the case of 12-bit integer data. In memory, that data will be handled in at least 2 bytes, + * or 16 bits, and on some platforms in 4 or even 8 bytes. The size of such a dataset can be + * significantly reduced when written to disk if the unused bits are stripped out. + * + * The n-bit filter is provided for this purpose, packing n-bit data on output by stripping off all + * unused bits and unpacking on input, restoring the extra bits required by the computational + * processor. + * + *

N-bit Datatype

+ * An n-bit datatype is a datatype of n significant bits. Unless it is packed, an n-bit datatype is + * presented as an n-bit bitfield within a larger-sized value. For example, a 12-bit datatype might be + * presented as a 12-bit field in a 16-bit, or 2-byte, value. + * + * Currently, the datatype classes of n-bit datatype or n-bit field of a compound datatype or an array + * datatype are limited to integer or floating-point. + * + * The HDF5 user can create an n-bit datatype through a series of function calls. For example, the + * following calls create a 16-bit datatype that is stored in a 32-bit value with a 4-bit offset: + * \code + * hid_t nbit_datatype = H5Tcopy(H5T_STD_I32LE); + * H5Tset_precision(nbit_datatype, 16); + * H5Tset_offset(nbit_datatype, 4); + * \endcode + * + * In memory, one value of the above example n-bit datatype would be stored on a little-endian + * machine as follows: + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
<tr><th>byte 3</th><th>byte 2</th><th>byte 1</th><th>byte 0</th></tr>
<tr><td>????????</td><td>????SPPP</td><td>PPPPPPPP</td><td>PPPP????</td></tr>
+ * Note: Key: S - sign bit, E - exponent bit, M - mantissa bit, ? - padding bit. Sign bit is + * included in signed integer datatype precision. + *
+ * + *

N-bit Filter

+ * When data of an n-bit datatype is stored on disk using the n-bit filter, the filter packs the data by + * stripping off the padding bits; only the significant bits are retained and stored. The values on disk + * will appear as follows: + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
<tr><th>1st value</th><th>2nd value</th><th>nth value</th></tr>
<tr><td>SPPPPPPP PPPPPPPP</td><td>SPPPPPPP PPPPPPPP</td><td>...</td></tr>
+ * Note: Key: S - sign bit, E - exponent bit, M - mantissa bit, ? - padding bit. Sign bit is + * included in signed integer datatype precision. + *
+ * + *

How Does the N-bit Filter Work?

+ * The n-bit filter always compresses and decompresses according to dataset properties supplied by + * the HDF5 library in the datatype, dataspace, or dataset creation property list. + * + * The dataset datatype refers to how data is stored in an HDF5 file while the memory datatype + * refers to how data is stored in memory. The HDF5 library will do datatype conversion when + * writing data in memory to the dataset or reading data from the dataset to memory if the memory + * datatype differs from the dataset datatype. Datatype conversion is performed by HDF5 library + * before n-bit compression and after n-bit decompression. + * + * The following sub-sections examine the common cases: + * \li N-bit integer conversions + * \li N-bit floating-point conversions + * + *

N-bit Integer Conversions

+ * Integer data with a dataset of integer datatype of less than full precision and a memory datatype + * of #H5T_NATIVE_INT, provides the simplest application of the n-bit filter. + * + * The precision of #H5T_NATIVE_INT is 8 multiplied by sizeof(int). This value, the size of an + * int in bytes, differs from platform to platform; we assume a value of 4 for the following + * illustration. We further assume the memory byte order to be little-endian. + * + * In memory, therefore, the precision of #H5T_NATIVE_INT is 32 and the offset is 0. One value of + * #H5T_NATIVE_INT is laid out in memory as follows: + * + * + * + * + *
+ * \image html Dsets_NbitInteger1.gif "H5T_NATIVE_INT in memory"
+ * Note: Key: S - sign bit, E - exponent bit, M - mantissa bit, ? - padding bit. Sign bit is + * included in signed integer datatype precision. + *
+ * + * Suppose the dataset datatype has a precision of 16 and an offset of 4. After HDF5 converts + * values from the memory datatype to the dataset datatype, it passes something like the following + * to the n-bit filter for compression: + * + * + * + * + *
+ * \image html Dsets_NbitInteger2.gif "Passed to the n-bit filter"
+ * Note: Key: S - sign bit, E - exponent bit, M - mantissa bit, ? - padding bit. Sign bit is + * included in signed integer datatype precision. + *
+ * + * Notice that only the specified 16 bits (15 significant bits and the sign bit) are retained in the + * conversion. All other significant bits of the memory datatype are discarded because the dataset + * datatype calls for only 16 bits of precision. After n-bit compression, none of these discarded bits, + * known as padding bits will be stored on disk. + * + *

N-bit Floating-point Conversions

+ * Things get more complicated in the case of a floating-point dataset datatype class. This sub- + * section provides an example that illustrates the conversion from a memory datatype of + * #H5T_NATIVE_FLOAT to a dataset datatype of class floating-point. + * + * As before, let the #H5T_NATIVE_FLOAT be 4 bytes long, and let the memory byte order be + * little-endian. Per the IEEE standard, one value of #H5T_NATIVE_FLOAT is laid out in memory + * as follows: + * + * + * + * + *
+ * \image html Dsets_NbitFloating1.gif "H5T_NATIVE_FLOAT in memory"
+ * Note: Key: S - sign bit, E - exponent bit, M - mantissa bit, ? - padding bit. Sign bit is + * included in floating-point datatype precision. + *
+ * + * Suppose the dataset datatype has a precision of 20, offset of 7, mantissa size of 13, mantissa + * position of 7, exponent size of 6, exponent position of 20, and sign position of 26. For more + * information, @see @ref subsubsec_datatype_program_define. + * + * After HDF5 converts values from the memory datatype to the dataset datatype, it passes + * something like the following to the n-bit filter for compression: + * + * + * + * + *
+ * \image html Dsets_NbitFloating2.gif "Passed to the n-bit filter"
+ * Note: Key: S - sign bit, E - exponent bit, M - mantissa bit, ? - padding bit. Sign bit is + * included in floating-point datatype precision. + *
+ *
+ * The sign bit and truncated mantissa bits are not changed during datatype conversion by the
+ * HDF5 library. On the other hand, the conversion of the 8-bit exponent to a 6-bit exponent is a
+ * little tricky:
+ *
+ * The bias for the new exponent in the n-bit datatype is:
+ *
+ * 2^(n-1) - 1
+ *
+ * The following formula is used for this exponent conversion:
+ *
+ * exp8 - (2^(8-1) - 1) = exp6 - (2^(6-1) - 1) = actual exponent value
+ *
+ * where exp8 is the stored decimal value as represented by the 8-bit exponent, and exp6 is the + * stored decimal value as represented by the 6-bit exponent. + * + * In this example, caution must be taken to ensure that, after conversion, the actual exponent value + * is within the range that can be represented by a 6-bit exponent. For example, an 8-bit exponent + * can represent values from -127 to 128 while a 6-bit exponent can represent values only from -31 + * to 32. + * + *
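+ *
+ * As a worked illustration of the formula (the stored value 130 is a hypothetical example, not a
+ * value taken from the figures above): an 8-bit exponent stored as exp8 = 130 represents an actual
+ * exponent of 130 - (2^(8-1) - 1) = 130 - 127 = 3; the corresponding 6-bit stored value is then
+ * exp6 = 3 + (2^(6-1) - 1) = 3 + 31 = 34, which is within the range a 6-bit exponent can store.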

N-bit Filter Behavior

+ * The n-bit filter was designed to treat the incoming data byte by byte at the lowest level. The + * purpose was to make the n-bit filter as generic as possible so that no pointer cast related to the + * datatype is needed. + * + * Bitwise operations are employed for packing and unpacking at the byte level. + * + * Recursive function calls are used to treat compound and array datatypes. + * + *

N-bit Compression

+ * The main idea of n-bit compression is to use a loop to compress each data element in a chunk. + * Depending on the datatype of each element, the n-bit filter will call one of four functions. Each + * of these functions performs one of the following tasks: + * \li Compress a data element of a no-op datatype + * \li Compress a data element of an atomic datatype + * \li Compress a data element of a compound datatype + * \li Compress a data element of an array datatype + * + * No-op datatypes: The n-bit filter does not actually compress no-op datatypes. Rather, it copies + * the data buffer of the no-op datatype from the non-compressed buffer to the proper location in + * the compressed buffer; the compressed buffer has no holes. The term “compress” is used here + * simply to distinguish this function from the function that performs the reverse operation during + * decompression. + * + * Atomic datatypes: The n-bit filter will find the bytes where significant bits are located and try to + * compress these bytes, one byte at a time, using a loop. At this level, the filter needs the following + * information: + *
+ * \li The byte offset of the beginning of the current data element with respect to the
+ * beginning of the input data buffer
+ * \li Datatype size, precision, offset, and byte order
+ * + * The n-bit filter compresses from the most significant byte containing significant bits to the least + * significant byte. For big-endian data, therefore, the loop index progresses from smaller to larger + * while for little-endian, the loop index progresses from larger to smaller. + * + * In the extreme case of when the n-bit datatype has full precision, this function copies the content + * of the entire non-compressed datatype to the compressed output buffer. + * + * Compound datatypes: The n-bit filter will compress each data member of the compound + * datatype. If the member datatype is of an integer or floating-point datatype, the n-bit filter will + * call the function described above. If the member datatype is of a no-op datatype, the filter will + * call the function described above. If the member datatype is of a compound datatype, the filter + * will make a recursive call to itself. If the member datatype is of an array datatype, the filter will + * call the function described below. + * + * Array datatypes: The n-bit filter will use a loop to compress each array element in the array. If + * the base datatype of array element is of an integer or floating-point datatype, the n-bit filter will + * call the function described above. If the base datatype is of a no-op datatype, the filter will call + * the function described above. If the base datatype is of a compound datatype, the filter will call + * the function described above. If the member datatype is of an array datatype, the filter will make + * a recursive call of itself. + * + *

N-bit Decompression

+ * The n-bit decompression algorithm is very similar to n-bit compression. The only difference is + * that at the byte level, compression packs out all padding bits and stores only significant bits into + * a continuous buffer (unsigned char) while decompression unpacks significant bits and inserts + * padding bits (zeros) at the proper positions to recover the data bytes as they existed before + * compression. + * + *

Storing N-bit Parameters to Array cd_values[]

+ * All of the information, or parameters, required by the n-bit filter are gathered and stored in the + * array cd_values[] by the private function H5Z__set_local_nbit and are passed to another private + * function, H5Z__filter_nbit, by the HDF5 Library. + * These parameters are as follows: + * \li Parameters related to the datatype + * \li The number of elements within the chunk + * \li A flag indicating whether compression is needed + * + * The first and second parameters can be obtained using the HDF5 dataspace and datatype + * interface calls. + * + * A compound datatype can have members of array or compound datatype. An array datatype’s + * base datatype can be a complex compound datatype. Recursive calls are required to set + * parameters for these complex situations. + * + * Before setting the parameters, the number of parameters should be calculated to dynamically + * allocate the array cd_values[], which will be passed to the HDF5 Library. This also requires + * recursive calls. + * + * For an atomic datatype (integer or floating-point), parameters that will be stored include the + * datatype’s size, endianness, precision, and offset. + * + * For a no-op datatype, only the size is required. + * + * For a compound datatype, parameters that will be stored include the datatype’s total size and + * number of members. For each member, its member offset needs to be stored. Other parameters + * for members will depend on the respective datatype class. + * + * For an array datatype, the total size parameter should be stored. Other parameters for the array’s + * base type depend on the base type’s datatype class. + * + * Further, to correctly retrieve the parameter for use of n-bit compression or decompression later, + * parameters for distinguishing between datatype classes should be stored. + * + *

Implementation

+ * Three filter callback functions were written for the n-bit filter: + * \li H5Z__can_apply_nbit + * \li H5Z__set_local_nbit + * \li H5Z__filter_nbit + * + * These functions are called internally by the HDF5 library. A number of utility functions were + * written for the function H5Z__set_local_nbit. Compression and decompression functions were + * written and are called by function H5Z__filter_nbit. All these functions are included in the file + * H5Znbit.c. + * + * The public function #H5Pset_nbit is called by the application to set up the use of the n-bit filter. + * This function is included in the file H5Pdcpl.c. The application does not need to supply any + * parameters. + * + *
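+ *
+ * From the application's point of view, enabling the filter is a single call on a chunked dataset
+ * creation property list; a minimal sketch (the 10 x 15 chunk size is an arbitrary illustration)
+ * looks like this:
+ * \code
+ * hid_t   dcpl_id       = H5Pcreate(H5P_DATASET_CREATE);
+ * hsize_t chunk_dims[2] = {10, 15};   // the n-bit filter, like other filters, requires chunked storage
+ *
+ * H5Pset_chunk(dcpl_id, 2, chunk_dims);
+ * H5Pset_nbit(dcpl_id);               // no further parameters are needed
+ * \endcode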

How N-bit Parameters are Stored

+ * A scheme of storing parameters required by the n-bit filter in the array cd_values[] was + * developed utilizing recursive function calls. + * + * Four private utility functions were written for storing the parameters associated with atomic + * (integer or floating-point), no-op, array, and compound datatypes: + * \li H5Z__set_parms_atomic + * \li H5Z__set_parms_array + * \li H5Z__set_parms_nooptype + * \li H5Z__set_parms_compound + * + * The scheme is briefly described below. + * + * First, assign a numeric code for datatype class atomic (integer or float), no-op, array, and + * compound datatype. The code is stored before other datatype related parameters are stored. + * + * The first three parameters of cd_values[] are reserved for: + * \li 1. The number of valid entries in the array cd_values[] + * \li 2. A flag indicating whether compression is needed + * \li 3. The number of elements in the chunk + * + * Throughout the balance of this explanation, i represents the index of cd_values[]. + * In the function H5Z__set_local_nbit: + *
+ * <ol>
+ * <li>i = 2</li>
+ * <li>Get the number of elements in the chunk and store in cd_values[i]; increment i</li>
+ * <li>Get the class of the datatype:
+ * <ul>
+ * <li>For an integer or floating-point datatype, call H5Z__set_parms_atomic</li>
+ * <li>For an array datatype, call H5Z__set_parms_array</li>
+ * <li>For a compound datatype, call H5Z__set_parms_compound</li>
+ * <li>For none of the above, call H5Z__set_parms_nooptype</li>
+ * </ul></li>
+ * <li>Store i in cd_values[0] and the flag in cd_values[1]</li>
+ * </ol>
+ * + * In the function H5Z__set_parms_atomic: + * \li 1. Store the assigned numeric code for the atomic datatype in cd_value[i]; increment i + * \li 2. Get the size of the atomic datatype and store in cd_value[i]; increment i + * \li 3. Get the order of the atomic datatype and store in cd_value[i]; increment i + * \li 4. Get the precision of the atomic datatype and store in cd_value[i]; increment i + * \li 5. Get the offset of the atomic datatype and store in cd_value[i]; increment i + * \li 6. Determine the need to do compression at this point + * + * In the function H5Z__set_parms_nooptype: + * \li 1. Store the assigned numeric code for the no-op datatype in cd_value[i]; increment i + * \li 2. Get the size of the no-op datatype and store in cd_value[i]; increment i + * + * In the function H5Z__set_parms_array: + *
+ * <ol>
+ * <li>Store the assigned numeric code for the array datatype in cd_values[i]; increment i</li>
+ * <li>Get the size of the array datatype and store in cd_values[i]; increment i</li>
+ * <li>Get the class of the array’s base datatype:
+ * <ul>
+ * <li>For an integer or floating-point datatype, call H5Z__set_parms_atomic</li>
+ * <li>For an array datatype, call H5Z__set_parms_array</li>
+ * <li>For a compound datatype, call H5Z__set_parms_compound</li>
+ * <li>If none of the above, call H5Z__set_parms_nooptype</li>
+ * </ul></li>
+ * </ol>
+ * + * In the function H5Z__set_parms_compound: + *
+ * <ol>
+ * <li>Store the assigned numeric code for the compound datatype in cd_values[i]; increment i</li>
+ * <li>Get the size of the compound datatype and store in cd_values[i]; increment i</li>
+ * <li>Get the number of members and store in cd_values[i]; increment i</li>
+ * <li>For each member:
+ * <ul>
+ * <li>Get the member offset and store in cd_values[i]; increment i</li>
+ * <li>Get the class of the member datatype</li>
+ * <li>For an integer or floating-point datatype, call H5Z__set_parms_atomic</li>
+ * <li>For an array datatype, call H5Z__set_parms_array</li>
+ * <li>For a compound datatype, call H5Z__set_parms_compound</li>
+ * <li>If none of the above, call H5Z__set_parms_nooptype</li>
+ * </ul></li>
+ * </ol>
+ * + *

N-bit Compression and Decompression Functions

+ * The n-bit compression and decompression functions above are called by the private HDF5 + * function H5Z__filter_nbit. The compress and decompress functions retrieve the n-bit parameters + * from cd_values[] as it was passed by H5Z__filter_nbit. Parameters are retrieved in exactly the + * same order in which they are stored and lower-level compression and decompression functions + * for different datatype classes are called. + * + * N-bit compression is not implemented in place. Due to the difficulty of calculating actual output + * buffer size after compression, the same space as that of the input buffer is allocated for the output + * buffer as passed to the compression function. However, the size of the output buffer passed by + * reference to the compression function will be changed (smaller) after the compression is + * complete. + * + *

Usage Examples

+ * + * The following code example illustrates the use of the n-bit filter for writing and reading n-bit + * integer data. + * + * N-bit compression for integer data + * \code + * #include "hdf5.h" + * #include "stdlib.h" + * #include "math.h" + * + * #define H5FILE_NAME "nbit_test_int.h5" + * #define DATASET_NAME "nbit_int" + * #define NX 200 + * #define NY 300 + * #define CH_NX 10 + * #define CH_NY 15 + * + * int main(void) + * { + * hid_t file, dataspace, dataset, datatype, mem_datatype, dset_create_props; + * hsize_t dims[2], chunk_size[2]; + * int orig_data[NX][NY]; + * int new_data[NX][NY]; + * int i, j; + * size_t precision, offset; + * + * // Define dataset datatype (integer), and set precision, offset + * datatype = H5Tcopy(H5T_NATIVE_INT); + * precision = 17; // precision includes sign bit + * if(H5Tset_precision(datatype,precision) < 0) { + * printf("Error: fail to set precision\n"); + * return -1; + * } + * offset = 4; + * if(H5Tset_offset(datatype,offset) < 0) { + * printf("Error: fail to set offset\n"); + * return -1; + * } + * + * // Copy to memory datatype + * mem_datatype = H5Tcopy(datatype); + * + * // Set order of dataset datatype + * if(H5Tset_order(datatype, H5T_ORDER_BE) < 0) { + * printf("Error: fail to set endianness\n"); + * return -1; + * } + * + * // Initialize data buffer with random data within correct + * // range corresponding to the memory datatype's precision + * // and offset. + * for (i = 0; i < NX; i++) + * for (j = 0; j < NY; j++) + * orig_data[i][j] = rand() % (int)pow(2, precision-1) << offset; + * + * // Describe the size of the array. + * dims[0] = NX; + * dims[1] = NY; + * if((dataspace = H5Screate_simple (2, dims, NULL)) < 0) { + * printf("Error: fail to create dataspace\n"); + * return -1; + * } + * + * // Create a new file using read/write access, default file + * // creation properties, and default file access properties. + * if((file = H5Fcreate (H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) { + * printf("Error: fail to create file\n"); + * return -1; + * } + * + * // Set the dataset creation property list to specify that + * // the raw data is to be partitioned into 10 x 15 element + * // chunks and that each chunk is to be compressed. + * chunk_size[0] = CH_NX; + * chunk_size[1] = CH_NY; + * if((dset_create_props = H5Pcreate (H5P_DATASET_CREATE)) < 0) { + * printf("Error: fail to create dataset property\n"); + * return -1; + * } + * if(H5Pset_chunk (dset_create_props, 2, chunk_size) < 0) { + * printf("Error: fail to set chunk\n"); + * return -1; + * } + * + * // Set parameters for n-bit compression; check the description + * // of the H5Pset_nbit function in the HDF5 Reference Manual + * // for more information. + * if(H5Pset_nbit (dset_create_props) < 0) { + * printf("Error: fail to set nbit filter\n"); + * return -1; + * } + * + * // Create a new dataset within the file. The datatype + * // and dataspace describe the data on disk, which may + * // be different from the format used in the application's + * // memory. + * if((dataset = H5Dcreate(file, DATASET_NAME, datatype, dataspace, + * H5P_DEFAULT, dset_create_props, H5P_DEFAULT)) < 0) { + * printf("Error: fail to create dataset\n"); + * return -1; + * } + * + * // Write the array to the file. The datatype and dataspace + * // describe the format of the data in the 'orig_data' buffer. + * // The raw data is translated to the format required on disk, + * // as defined above. We use default raw data transfer + * // properties. 
+ * if(H5Dwrite (dataset, mem_datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, orig_data) < 0) { + * printf("Error: fail to write to dataset\n"); + * return -1; + * } + * H5Dclose (dataset); + * + * if((dataset = H5Dopen(file, DATASET_NAME, H5P_DEFAULT)) < 0) { + * printf("Error: fail to open dataset\n"); + * return -1; + * } + * + * // Read the array. This is similar to writing data, + * // except the data flows in the opposite direction. + * // Note: Decompression is automatic. + * if(H5Dread (dataset, mem_datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, new_data) < 0) { + * printf("Error: fail to read from dataset\n"); + * return -1; + * } + * + * H5Tclose (datatype); + * H5Tclose (mem_datatype); + * H5Dclose (dataset); + * H5Sclose (dataspace); + * H5Pclose (dset_create_props); + * H5Fclose (file); + * + * return 0; + * } + * \endcode + * + * The following code example illustrates the use of the n-bit filter for writing and reading n-bit + * floating-point data. + * + * N-bit compression for floating-point data + * \code + * #include "hdf5.h" + * + * #define H5FILE_NAME "nbit_test_float.h5" + * #define DATASET_NAME "nbit_float" + * #define NX 2 + * #define NY 5 + * #define CH_NX 2 + * #define CH_NY 5 + * + * int main(void) + * { + * hid_t file, dataspace, dataset, datatype, dset_create_props; + * hsize_t dims[2], chunk_size[2]; + * + * // orig_data[] are initialized to be within the range that + * // can be represented by dataset datatype (no precision + * // loss during datatype conversion) + * // + * float orig_data[NX][NY] = {{188384.00, 19.103516,-1.0831790e9, -84.242188, 5.2045898}, + * {-49140.000, 2350.2500, -3.2110596e-1, 6.4998865e-5, -0.0000000}}; + * float new_data[NX][NY]; + * size_t precision, offset; + * + * // Define single-precision floating-point type for dataset + * //--------------------------------------------------------------- + * // size=4 byte, precision=20 bits, offset=7 bits, + * // mantissa size=13 bits, mantissa position=7, + * // exponent size=6 bits, exponent position=20, + * // exponent bias=31. + * // It can be illustrated in little-endian order as: + * // (S - sign bit, E - exponent bit, M - mantissa bit, ? - padding bit) + * // + * // 3 2 1 0 + * // ?????SEE EEEEMMMM MMMMMMMM M??????? + * // + * // To create a new floating-point type, the following + * // properties must be set in the order of + * // set fields -> set offset -> set precision -> set size. + * // All these properties must be set before the type can + * // function. Other properties can be set anytime. Derived + * // type size cannot be expanded bigger than original size + * // but can be decreased. There should be no holes + * // among the significant bits. Exponent bias usually + * // is set 2^(n-1)-1, where n is the exponent size. + * //--------------------------------------------------------------- + * datatype = H5Tcopy(H5T_IEEE_F32BE); + * if(H5Tset_fields(datatype, 26, 20, 6, 7, 13) < 0) { + * printf("Error: fail to set fields\n"); + * return -1; + * } + * offset = 7; + * if(H5Tset_offset(datatype,offset) < 0) { + * printf("Error: fail to set offset\n"); + * return -1; + * } + * precision = 20; + * if(H5Tset_precision(datatype,precision) < 0) { + * printf("Error: fail to set precision\n"); + * return -1; + * } + * if(H5Tset_size(datatype, 4) < 0) { + * printf("Error: fail to set size\n"); + * return -1; + * } + * if(H5Tset_ebias(datatype, 31) < 0) { + * printf("Error: fail to set exponent bias\n"); + * return -1; + * } + * + * // Describe the size of the array. 
+ * dims[0] = NX;
+ * dims[1] = NY;
+ * if((dataspace = H5Screate_simple (2, dims, NULL)) < 0) {
+ * printf("Error: fail to create dataspace\n");
+ * return -1;
+ * }
+ *
+ * // Create a new file using read/write access, default file
+ * // creation properties, and default file access properties.
+ * if((file = H5Fcreate (H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) {
+ * printf("Error: fail to create file\n");
+ * return -1;
+ * }
+ *
+ * // Set the dataset creation property list to specify that
+ * // the raw data is to be partitioned into 2 x 5 element
+ * // chunks and that each chunk is to be compressed.
+ * chunk_size[0] = CH_NX;
+ * chunk_size[1] = CH_NY;
+ * if((dset_create_props = H5Pcreate (H5P_DATASET_CREATE)) < 0) {
+ * printf("Error: fail to create dataset property\n");
+ * return -1;
+ * }
+ * if(H5Pset_chunk (dset_create_props, 2, chunk_size) < 0) {
+ * printf("Error: fail to set chunk\n");
+ * return -1;
+ * }
+ *
+ * // Set parameters for n-bit compression; check the description
+ * // of the H5Pset_nbit function in the HDF5 Reference Manual
+ * // for more information.
+ * if(H5Pset_nbit (dset_create_props) < 0) {
+ * printf("Error: fail to set nbit filter\n");
+ * return -1;
+ * }
+ *
+ * // Create a new dataset within the file. The datatype
+ * // and dataspace describe the data on disk, which may
+ * // be different from the format used in the application's memory.
+ * if((dataset = H5Dcreate(file, DATASET_NAME, datatype, dataspace, H5P_DEFAULT,
+ * dset_create_props, H5P_DEFAULT)) < 0) {
+ * printf("Error: fail to create dataset\n");
+ * return -1;
+ * }
+ *
+ * // Write the array to the file. The datatype and dataspace
+ * // describe the format of the data in the 'orig_data' buffer.
+ * // The raw data is translated to the format required on disk,
+ * // as defined above. We use default raw data transfer properties.
+ * if(H5Dwrite (dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, orig_data) < 0) {
+ * printf("Error: fail to write to dataset\n");
+ * return -1;
+ * }
+ * H5Dclose (dataset);
+ * if((dataset = H5Dopen(file, DATASET_NAME, H5P_DEFAULT)) < 0) {
+ * printf("Error: fail to open dataset\n");
+ * return -1;
+ * }
+ *
+ * // Read the array. This is similar to writing data,
+ * // except the data flows in the opposite direction.
+ * // Note: Decompression is automatic.
+ * if(H5Dread (dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, new_data) < 0) {
+ * printf("Error: fail to read from dataset\n");
+ * return -1;
+ * }
+ * H5Tclose (datatype);
+ * H5Dclose (dataset);
+ * H5Sclose (dataspace);
+ * H5Pclose (dset_create_props);
+ * H5Fclose (file);
+ *
+ * return 0;
+ * }
+ * \endcode
+ *

Limitations

+ * Because the array cd_values[] has to fit into an object header message of 64K, the n-bit filter has
+ * an upper limit on the number of n-bit parameters that can be stored in it. To be conservative, a
+ * maximum of 4K is allowed for the number of parameters.
+ *
+ * The n-bit filter currently only compresses n-bit datatypes or fields derived from integer or
+ * floating-point datatypes. The n-bit filter assumes padding bits of zero. This may not be true since
+ * the HDF5 user can set the padding bits to zero or one, or leave the background alone. However, it
+ * is expected that the n-bit filter will be modified to adjust to such situations.
+ *
+ * The n-bit filter does not have a way to handle the situation where the fill value of a dataset is
+ * defined and the fill value is not of an n-bit datatype although the dataset datatype is.
+ *
+ * \subsubsection subsubsec_dataset_filters_scale Using the Scale‐offset Filter
+ * Generally speaking, scale-offset compression performs a scale and/or offset operation on each
+ * data value and truncates the resulting value to a minimum number of bits (minimum-bits) before
+ * storing it.
+ *
+ * The current scale-offset filter supports integer and floating-point datatypes only. For the
+ * floating-point datatype, float and double are supported, but long double is not supported.
+ *
+ * Integer data compression uses a straightforward algorithm. Floating-point data compression
+ * adopts the GRiB data packing mechanism, which offers two alternate methods: a fixed
+ * minimum-bits method and a variable minimum-bits method. Currently, only the variable
+ * minimum-bits method is implemented.
+ *
+ * Like other I/O filters supported by the HDF5 library, applications using the scale-offset filter
+ * must store data with chunked storage.
+ *
+ * Integer type: The minimum-bits of integer data can be determined by the filter. For example, if
+ * the maximum value of the data to be compressed is 7065 and the minimum value is 2970, then the
+ * “span” of dataset values is (max - min + 1) = 4096. If no fill value is defined for the
+ * dataset, the minimum-bits is ceiling(log2(span)) = 12. With a fill value set, the minimum-bits is
+ * ceiling(log2(span + 1)) = 13.
+ *
+ * HDF5 users can also set the minimum-bits. However, if the user gives a minimum-bits that is
+ * less than that calculated by the filter, the compression will be lossy.
+ *
+ * Floating-point type: The basic idea of the scale-offset filter for the floating-point type is to
+ * transform the data by some kind of scaling to integer data, and then to follow the procedure of
+ * the scale-offset filter for the integer type to do the data compression. Due to the data
+ * transformation from floating-point to integer, the scale-offset filter is lossy in nature.
+ *
+ * Two methods of scaling the floating-point data are used: the so-called D-scaling and E-scaling.
+ * D-scaling is more straightforward and easier to understand. As of the HDF5 1.8 release, only the
+ * D-scaling method has been implemented.
+ *

Design

+ * Before the filter does any real work, it needs to gather some information from the HDF5 Library + * through API calls. The parameters the filter needs are: + * \li The minimum-bits of the data value + * \li The number of data elements in the chunk + * \li The datatype class, size, sign (only for integer type), byte order, and fill value if defined + * + * Size and sign are needed to determine what kind of pointer cast to use when retrieving values + * from the data buffer. + * + * The pipeline of the filter can be divided into four parts: (1)pre-compression; (2)compression; + * (3)decompression; (4)post-decompression. + * + * Depending on whether a fill value is defined or not, the filter will handle pre-compression and + * post-decompression differently. + * + * The scale-offset filter only needs the memory byte order, size of datatype, and minimum-bits for + * compression and decompression. + * + * Since decompression has no access to the original data, the minimum-bits and the minimum + * value need to be stored with the compressed data for decompression and post-decompression. + * + *

Integer Type

+ * Pre-compression: During pre-compression minimum-bits is calculated if it is not set by the user. + * For more information on how minimum-bits are calculated, @see @ref subsubsec_dataset_filters_nbit. + * + * If the fill value is defined, finding the maximum and minimum values should ignore the data + * element whose value is equal to the fill value. + * + * If no fill value is defined, the value of each data element is subtracted by the minimum value + * during this stage. + * + * If the fill value is defined, the fill value is assigned to the maximum value. In this way minimum- + * bits can represent a data element whose value is equal to the fill value and subtracts the + * minimum value from a data element whose value is not equal to the fill value. + * + * The fill value (if defined), the number of elements in a chunk, the class of the datatype, the size + * of the datatype, the memory order of the datatype, and other similar elements will be stored in + * the HDF5 object header for the post-decompression usage. + * + * After pre-compression, all values are non-negative and are within the range that can be stored by + * minimum-bits. + * + * Compression: All modified data values after pre-compression are packed together into the + * compressed data buffer. The number of bits for each data value decreases from the number of + * bits of integer (32 for most platforms) to minimum-bits. The value of minimum-bits and the + * minimum value are added to the data buffer and the whole buffer is sent back to the library. In + * this way, the number of bits for each modified value is no more than the size of minimum-bits. + * + * Decompression: In this stage, the number of bits for each data value is resumed from minimum- + * bits to the number of bits of integer. + * + * Post-decompression: For the post-decompression stage, the filter does the opposite of what it + * does during pre-compression except that it does not calculate the minimum-bits or the minimum + * value. These values were saved during compression and can be retrieved through the resumed + * data buffer. If no fill value is defined, the filter adds the minimum value back to each data + * element. + * + * If the fill value is defined, the filter assigns the fill value to the data element whose value is equal + * to the maximum value that minimum-bits can represent and adds the minimum value back to + * each data element whose value is not equal to the maximum value that minimum-bits can + * represent. + * + * @anchor h4_float_datatype

Floating-point Type

+ * The filter will do data transformation from floating-point type to integer type and then handle the
+ * data by using the procedure for handling the integer data inside the filter. Insignificant bits of
+ * floating-point data will be cut off during data transformation, so this filter is a lossy
+ * compression method.
+ *
+ * There are two scaling methods: D-scaling and E-scaling. The HDF5 1.8 release only supports
+ * D-scaling. D-scaling is short for decimal scaling. E-scaling should be similar conceptually. In
+ * order to transform data from floating-point to integer, a scale factor is introduced. The minimum
+ * value is calculated first and is subtracted from each data element. The modified data are
+ * multiplied by 10 (decimal) to the power of scale_factor, and only the integer part is kept and
+ * manipulated through the routines for the integer type of the filter during pre-compression and
+ * compression. Integer data are divided by 10 to the power of scale_factor to transform back to
+ * floating-point data during decompression and post-decompression. The minimum value is then added
+ * back to each data element, and the floating-point data are restored. However, the restored data
+ * will have lost some insignificant bits compared with the original values.
+ *
+ * For example, the following floating-point data are manipulated by the filter, and the D-scaling
+ * factor is 2.
+ * {104.561, 99.459, 100.545, 105.644}
+ *
+ * The minimum value is 99.459; after subtracting 99.459 from each data element, the modified data are
+ * {5.102, 0, 1.086, 6.185}
+ *
+ * Since the D-scaling factor is 2, all floating-point data are multiplied by 10^2 with this result:
+ * {510.2, 0, 108.6, 618.5}
+ *
+ * The digits after the decimal point are rounded off, and then the set looks like:
+ * {510, 0, 109, 619}
+ *
+ * After decompression, each value is divided by 10^2 and the offset 99.459 is added back.
+ * The floating-point data become
+ * {104.559, 99.459, 100.549, 105.649}
+ *
+ * The error in each restored value is no more than 5 * 10^-(D-scaling factor + 1).
+ * D-scaling is sometimes also referred to as a variable minimum-bits method since, for different
+ * datasets, the minimum-bits needed to represent the same decimal precision will vary. For E-scaling,
+ * the data value is scaled by 2 to the power of scale_factor. E-scaling is also called a fixed
+ * minimum-bits method since, for different datasets, the minimum-bits will always be fixed to the
+ * scale factor of E-scaling. Currently, HDF5 ONLY supports the D-scaling (variable minimum-bits) method.
+ *

Implementation

+ * The scale-offset filter implementation was written and included in the file H5Zscaleoffset.c. + * Function #H5Pset_scaleoffset was written and included in the file “H5Pdcpl.c”. The HDF5 user + * can supply minimum-bits by calling function #H5Pset_scaleoffset. + * + * The scale-offset filter was implemented based on the design outlined in this section. However, + * the following factors need to be considered: + *
+ * <ol>
+ * <li>The filter needs the appropriate cast pointer whenever it needs to retrieve data values.</li>
+ * <li>The HDF5 Library passes the to-be-compressed data to the filter in the format of the dataset
+ * datatype, and the filter passes back the decompressed data in the same format. If a fill value is
+ * defined, it is also in dataset datatype format. For example, if the byte order of the dataset
+ * datatype is different from that of the memory datatype of the platform, compression or
+ * decompression performs an endianness conversion of the data buffer. Moreover, the filter must
+ * allow for the memory byte order being different during compression and decompression.</li>
+ * <li>The difference of endianness and datatype between file and memory should be considered
+ * when saving and retrieving the minimum-bits, minimum value, and fill value.</li>
+ * <li>If the user sets the minimum-bits to the full precision of the datatype, no operation is needed
+ * at the filter side. If the full precision is a result of calculation by the filter, then the
+ * minimum-bits needs to be saved for decompression, but no compression or decompression is needed
+ * (only a copy of the input buffer is needed).</li>
+ * <li>If, by calculation of the filter, the minimum-bits is equal to zero, special handling is needed.
+ * Since it means all values are the same, no compression or decompression is needed. But the
+ * minimum-bits and minimum value still need to be saved during compression.</li>
+ * <li>For floating-point data, the minimum value of the dataset should be calculated first. The
+ * minimum value is then subtracted from each data element to obtain the “offset” data. The offset
+ * data will then follow the steps outlined above in the discussion of floating-point types to do data
+ * transformation to integer and rounding. For more information, @see @ref h4_float_datatype.</li>
+ * </ol>
+ * + *

Usage Examples

+ * The following code example illustrates the use of the scale-offset filter for writing and reading + * integer data. + * + * Scale-offset compression integer data + * \code + * #include "hdf5.h" + * #include "stdlib.h" + * + * #define H5FILE_NAME "scaleoffset_test_int.h5" + * #define DATASET_NAME "scaleoffset_int" + * #define NX 200 + * #define NY 300 + * #define CH_NX 10 + * #define CH_NY 15 + * int main(void) + * { + * hid_t file, dataspace, dataset, datatype, dset_create_props; + * hsize_t dims[2], chunk_size[2]; + * int orig_data[NX][NY]; + * int new_data[NX][NY]; + * int i, j, fill_val; + * + * // Define dataset datatype + * datatype = H5Tcopy(H5T_NATIVE_INT); + * + * // Initialize data buffer + * for (i=0; i < NX; i++) + * for (j=0; j < NY; j++) + * orig_data[i][j] = rand() % 10000; + * + * // Describe the size of the array. + * dims[0] = NX; + * dims[1] = NY; + * if((dataspace = H5Screate_simple (2, dims, NULL)) < 0) { + * printf("Error: fail to create dataspace\n"); + * return -1; + * } + * + * // Create a new file using read/write access, default file + * // creation properties, and default file access properties. + * if((file = H5Fcreate (H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) { + * printf("Error: fail to create file\n"); + * return -1; + * } + * + * // Set the dataset creation property list to specify that + * // the raw data is to be partitioned into 10 x 15 element + * // chunks and that each chunk is to be compressed. + * chunk_size[0] = CH_NX; + * chunk_size[1] = CH_NY; + * if((dset_create_props = H5Pcreate (H5P_DATASET_CREATE)) < 0) { + * printf("Error: fail to create dataset property\n"); + * return -1; + * } + * if(H5Pset_chunk (dset_create_props, 2, chunk_size) < 0) { + * printf("Error: fail to set chunk\n"); + * return -1; + * } + * + * // Set the fill value of dataset + * fill_val = 10000; + * if (H5Pset_fill_value(dset_create_props, H5T_NATIVE_INT, &fill_val)<0) { + * printf("Error: can not set fill value for dataset\n"); + * return -1; + * } + * + * // Set parameters for scale-offset compression. Check the + * // description of the H5Pset_scaleoffset function in the + * // HDF5 Reference Manual for more information. + * if(H5Pset_scaleoffset (dset_create_props, H5Z_SO_INT, H5Z_SO_INT_MINIMUMBITS_DEFAULT) < 0) { + * printf("Error: fail to set scaleoffset filter\n"); + * return -1; + * } + * + * // Create a new dataset within the file. The datatype + * // and dataspace describe the data on disk, which may + * // or may not be different from the format used in the + * // application's memory. The link creation and + * // dataset access property list parameters are passed + * // with default values. + * if((dataset = H5Dcreate (file, DATASET_NAME, datatype, dataspace, H5P_DEFAULT, + * dset_create_props, H5P_DEFAULT)) < 0) { + * printf("Error: fail to create dataset\n"); + * return -1; + * } + * + * // Write the array to the file. The datatype and dataspace + * // describe the format of the data in the 'orig_data' buffer. + * // We use default raw data transfer properties. + * if(H5Dwrite (dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, orig_data) < 0) { + * printf("Error: fail to write to dataset\n"); + * return -1; + * } + * + * H5Dclose (dataset); + * + * if((dataset = H5Dopen(file, DATASET_NAME, H5P_DEFAULT)) < 0) { + * printf("Error: fail to open dataset\n"); + * return -1; + * } + * + * // Read the array. This is similar to writing data, + * // except the data flows in the opposite direction. + * // Note: Decompression is automatic. 
+ * if(H5Dread (dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, new_data) < 0) { + * printf("Error: fail to read from dataset\n"); + * return -1; + * } + * + * H5Tclose (datatype); + * H5Dclose (dataset); + * H5Sclose (dataspace); + * H5Pclose (dset_create_props); + * H5Fclose (file); + * + * return 0; + * } + * \endcode + * + * The following code example illustrates the use of the scale-offset filter (set for variable + * minimum-bits method) for writing and reading floating-point data. + * + * Scale-offset compression floating-point data + * \code + * #include "hdf5.h" + * #include "stdlib.h" + * + * #define H5FILE_NAME "scaleoffset_test_float_Dscale.h5" + * #define DATASET_NAME "scaleoffset_float_Dscale" + * #define NX 200 + * #define NY 300 + * #define CH_NX 10 + * #define CH_NY 15 + * + * int main(void) + * { + * hid_t file, dataspace, dataset, datatype, dset_create_props; + * hsize_t dims[2], chunk_size[2]; + * float orig_data[NX][NY]; + * float new_data[NX][NY]; + * float fill_val; + * int i, j; + * + * // Define dataset datatype + * datatype = H5Tcopy(H5T_NATIVE_FLOAT); + * + * // Initialize data buffer + * for (i=0; i < NX; i++) + * for (j=0; j < NY; j++) + * orig_data[i][j] = (rand() % 10000) / 1000.0; + * + * // Describe the size of the array. + * dims[0] = NX; + * dims[1] = NY; + * if((dataspace = H5Screate_simple (2, dims, NULL)) < 0) { + * printf("Error: fail to create dataspace\n"); + * return -1; + * } + * + * // Create a new file using read/write access, default file + * // creation properties, and default file access properties. + * if((file = H5Fcreate (H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) { + * printf("Error: fail to create file\n"); + * return -1; + * } + * + * // Set the dataset creation property list to specify that + * // the raw data is to be partitioned into 10 x 15 element + * // chunks and that each chunk is to be compressed. + * chunk_size[0] = CH_NX; + * chunk_size[1] = CH_NY; + * if((dset_create_props = H5Pcreate (H5P_DATASET_CREATE)) < 0) { + * printf("Error: fail to create dataset property\n"); + * return -1; + * } + * if(H5Pset_chunk (dset_create_props, 2, chunk_size) < 0) { + * printf("Error: fail to set chunk\n"); + * return -1; + * } + * + * // Set the fill value of dataset + * fill_val = 10000.0; + * if (H5Pset_fill_value(dset_create_props, H5T_NATIVE_FLOAT, &fill_val) < 0) { + * printf("Error: can not set fill value for dataset\n"); + * return -1; + * } + * + * // Set parameters for scale-offset compression; use variable + * // minimum-bits method, set decimal scale factor to 3. Check + * // the description of the H5Pset_scaleoffset function in the + * // HDF5 Reference Manual for more information. + * if(H5Pset_scaleoffset (dset_create_props, H5Z_SO_FLOAT_DSCALE, 3) < 0) { + * printf("Error: fail to set scaleoffset filter\n"); + * return -1; + * } + * + * // Create a new dataset within the file. The datatype + * // and dataspace describe the data on disk, which may + * // or may not be different from the format used in the + * // application's memory. + * if((dataset = H5Dcreate (file, DATASET_NAME, datatype, dataspace, H5P_DEFAULT, + * dset_create_props, H5P_DEFAULT)) < 0) { + * printf("Error: fail to create dataset\n"); + * return -1; + * } + * + * // Write the array to the file. The datatype and dataspace + * // describe the format of the data in the 'orig_data' buffer. + * // We use default raw data transfer properties. 
+ * if(H5Dwrite (dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, orig_data) < 0) { + * printf("Error: fail to write to dataset\n"); + * return -1; + * } + * + * H5Dclose (dataset); + * + * if((dataset = H5Dopen(file, DATASET_NAME, H5P_DEFAULT)) < 0) { + * printf("Error: fail to open dataset\n"); + * return -1; + * } + * + * // Read the array. This is similar to writing data, + * // except the data flows in the opposite direction. + * // Note: Decompression is automatic. + * if(H5Dread (dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, new_data) < 0) { + * printf("Error: fail to read from dataset\n"); + * return -1; + * } + * + * H5Tclose (datatype); + * H5Dclose (dataset); + * H5Sclose (dataspace); + * H5Pclose (dset_create_props); + * H5Fclose (file); + * + * return 0; + * } + * \endcode + * + *

Limitations

+ * For floating-point data handling, there are some algorithmic limitations to the GRiB data packing + * mechanism: + *
  1. + * Both the E-scaling and D-scaling methods are lossy + *
  2. + *
  3. + * For the D-scaling method, because data values are rounded to positive integer values + * before being truncated to the minimum-bits, their range is limited by the maximum value that can be + * represented by the corresponding unsigned integer type (of the same size as the floating- + * point type) + *
+ * + *

Suggestions

+ * The following are some suggestions for using the filter for floating-point data: + *
  1. + * It is better to convert the units of the data so that the values fall within a common range (for + * example, convert 1200m to 1.2km) + *
  2. + *
  3. + * If the data values to be compressed are very near zero, it is strongly recommended that the + * user set the fill value away from zero (for example, to a large positive number); if the user does + * nothing, the HDF5 Library will set the fill value to zero, and this may cause undesirable + * compression results + *
  4. + *
  5. + * Users are discouraged from using a very large decimal scale factor (for example, 100) with the + * D-scaling method; this can cause the filter to fail to ignore the fill value when finding the maximum + * and minimum values, which results in a much larger minimum-bits value (and poor compression) + *
+ * + * \subsubsection subsubsec_dataset_filters_szip Using the Szip Filter + * See The HDF Group website for further information regarding the Szip filter. + * + * Previous Chapter \ref sec_group - Next Chapter \ref sec_datatype + * + */ + +/** + * \defgroup H5D Datasets (H5D) * * Use the functions in this module to manage HDF5 datasets, including the * transfer of data between memory and disk and the description of dataset diff --git a/src/H5Emodule.h b/src/H5Emodule.h index 58a3517..f2e6d44 100644 --- a/src/H5Emodule.h +++ b/src/H5Emodule.h @@ -29,30 +29,558 @@ #define H5_MY_PKG_ERR H5E_ERROR #define H5_MY_PKG_INIT YES -/**\defgroup H5E H5E - * - * Use the functions in this module to manage HDF5 error stacks and error - * messages. - * - * - * - * - * - * - * - * - * - * - * - *
CreateRead
- * \snippet{lineno} H5E_examples.c create - * - * \snippet{lineno} H5E_examples.c read - *
UpdateDelete
- * \snippet{lineno} H5E_examples.c update - * - * \snippet{lineno} H5E_examples.c delete - *
+/** \page H5E_UG HDF5 Error Handling
+ *
+ * \section sec_error HDF5 Error Handling
+ *
+ * The HDF5 library provides an error reporting mechanism for both the library itself and for user
+ * application programs. It can trace errors through the function stack and report error information
+ * such as file name, function name, line number, and error description.
+ *
+ * \subsection subsec_error_intro Introduction
+ * The HDF5 Library provides an error reporting mechanism for both the library itself and for user application
+ * programs. It can trace errors through the function stack and report error information such as file name,
+ * function name, line number, and error description.
+ *
+ * \ref subsec_error_ops discusses the basic error concepts such as error stack, error record, and error
+ * message and describes the related API functions. These concepts and functions are sufficient for
+ * application programs to trace errors inside the HDF5 Library.
+ *
+ * \ref subsec_error_adv discusses the advanced concepts of error
+ * class and error stack handle and describes the related functions. With these concepts and functions, an
+ * application library or program using the HDF5 Library can have its own error report blended with HDF5’s
+ * error report.
+ *
+ * Starting with Release 1.8, we have a new set of Error Handling API functions. For the purpose of backward
+ * compatibility with version 1.6 and before, we still keep the old API functions, \ref H5Epush1,
+ * \ref H5Eprint1, \ref H5Ewalk1, \ref H5Eclear1, \ref H5Eget_auto1, \ref H5Eset_auto1. These functions do
+ * not have the error stack as a parameter. The library allows them to operate on the default error stack.
+ * (The H5E compatibility macros will choose the correct function based on the parameters.)
+ *
+ * The old API provides functionality similar to that discussed in \ref subsec_error_ops. The functionality
+ * discussed in \ref subsec_error_adv, the ability for applications to add their own error records, is the
+ * new design for the Error Handling API.
+ *
+ * \subsection subsec_error_H5E Error Handling Function Summaries
+ * @see H5E reference manual
+ *
+ * \subsection subsec_error_program Programming Model for Error Handling
+ * This section is under construction.
+ *
+ * \subsection subsec_error_ops Basic Error Handling Operations
+ * Let us first try to understand the error stack. An error stack is a collection of error records. Error
+ * records can be pushed onto or popped off the error stack. By default, when an error occurs deep within
+ * the HDF5 Library, an error record is pushed onto an error stack and that function returns a failure
+ * indication.
+ * Its caller detects the failure, pushes another record onto the stack, and returns a failure indication.
+ * This continues until the API function called by the application returns a failure indication. The next
+ * API function called will reset the error stack. All HDF5 Library error records belong to the same
+ * error class. For more information, see \ref subsec_error_adv.
+ *
+ * \subsubsection subsubsec_error_ops_stack Error Stack and Error Message
+ * In normal circumstances, an error causes the stack to be printed on the standard error stream
+ * automatically.
+ * This automatically printed stack is the library’s default error stack. For all the functions in this
+ * section, whenever an error stack ID is needed as a parameter, \ref H5E_DEFAULT can be used to indicate
+ * the library’s default stack.
The first error record of the error stack, number #000, is produced by the API function itself and + * is usually sufficient to indicate to the application what went wrong. + * + * + * + * + * + *
Example: An Error Message
+ *

If an application calls \ref H5Tclose on a + * predefined datatype then the following message is + * printed on the standard error stream. This is a + * simple error that has only one component, the API + * function; other errors may have many components. + *

+ * HDF5-DIAG: Error detected in HDF5 (1.10.9) thread 0.
+ *    #000: H5T.c line ### in H5Tclose(): predefined datatype
+ *       major: Function argument
+ *       minor: Bad value
+ *         
+ *
+ * In the example above, we can see that an error record has a major message and a minor message. A major
+ * message generally indicates where the error happens. The location can be a dataset or a dataspace, for
+ * example. A minor message explains further details of the error. An example is “unable to open file”.
+ * Another specific detail about the error can be found at the end of the first line of each error record.
+ * This error description is usually added by the library designer to indicate exactly what went wrong. In the
+ * example above, the “predefined datatype” is an error description.
+ *
+ * \subsubsection subsubsec_error_ops_print Print and Clear an Error Stack
+ * Besides the automatic error report, the error stack can also be printed and cleared by the functions
+ * \ref H5Eprint2 and \ref H5Eclear2. If an application wishes to make explicit
+ * calls to \ref H5Eprint2 to print the error stack, the automatic printing should be turned off
+ * to prevent error messages from being displayed twice (see \ref H5Eset_auto2).
+ *
+ * To print an error stack:
+ * \code
+ * herr_t H5Eprint2(hid_t error_stack, FILE * stream)
+ * \endcode
+ * This function prints the error stack specified by error_stack on the stream specified by stream. If the
+ * error stack is empty, a one‐line message will be printed. The following is an example of such a message.
+ * This message would be generated if the error was in the HDF5 Library.
+ * \code
+ * HDF5-DIAG: Error detected in HDF5 Library version: 1.10.9 thread 0.
+ * \endcode
+ *
+ * To clear an error stack:
+ * \code
+ * herr_t H5Eclear2(hid_t error_stack)
+ * \endcode
+ * The \ref H5Eclear2 function shown above clears the error stack specified by error_stack.
+ * \ref H5E_DEFAULT can be passed in to clear the current error stack. The current stack is also cleared
+ * whenever an API function is called; there are certain exceptions to this rule such as \ref H5Eprint2.
+ *
+ * \subsubsection subsubsec_error_ops_mute Mute Error Stack
+ * Sometimes an application calls a function for the sake of its return value, fully expecting the function
+ * to fail; sometimes the application wants to call \ref H5Eprint2 explicitly. In these situations,
+ * it would be misleading if an error message were still automatically printed. The
+ * \ref H5Eset_auto2 function can be used to control the automatic printing of error messages.
+ *
+ * To enable or disable automatic printing of errors:
+ * \code
+ * herr_t H5Eset_auto2(hid_t error_stack, H5E_auto2_t func, void *client_data)
+ * \endcode
+ * The \ref H5Eset_auto2 function can be used to turn on or off the automatic printing of errors
+ * for the error stack specified by error_stack. When turned on (non‐null func pointer), any API function
+ * which returns an error indication will first call func, passing it client_data as an argument. When the
+ * library is first initialized, the auto printing function is set to \ref H5Eprint2 and client_data
+ * is the standard error stream pointer, stderr.
+ *
+ * To see the current settings:
+ * \code
+ * herr_t H5Eget_auto2(hid_t error_stack, H5E_auto2_t *func, void **client_data)
+ * \endcode
+ * The function above returns the current settings for the automatic error stack traversal function, func, and
+ * its data, client_data. If either or both of the arguments are null, then the value is not returned.
+ *
+ *
Example: Turn off error messages while probing a function
+ *

An application can temporarily turn off error messages while “probing” a function. See the + * example below. + *

+ * ***  Save old error handler  ***
+ * H5E_auto2_t old_func;
+ * void *old_client_data;
+ * H5Eget_auto2(error_stack, &old_func, &old_client_data);
+ * ***  Turn off error handling  ***
+ * H5Eset_auto2(error_stack, NULL, NULL);
+ * ***  Probe. Likely to fail, but that’s okay  ***
+ * status = H5Fopen (......);
+ * ***  Restore previous error handler  ***
+ * H5Eset_auto2(error_stack, old_func, old_client_data);
+ *         
+ *
+ * + * + * + * + * + * + *
Example: Disable automatic printing and explicitly print error messages
+ *

Or automatic printing can be disabled altogether and error messages can be explicitly printed. + *

+ * ***  Turn off error handling permanently  ***
+ * H5Eset_auto2(error_stack, NULL, NULL);
+ * ***  If failure, print error message  ***
+ * if (H5Fopen (....)<0) {
+ *     H5Eprint2(H5E_DEFAULT, stderr);
+ * exit (1);
+ * }
+ *         
+ *
+ *
+ *
+ * \subsubsection subsubsec_error_ops_custom_print Customized Printing of an Error Stack
+ * Applications are allowed to define an automatic error traversal function other than the default
+ * \ref H5Eprint(). For instance, one can define a function that prints a simple, one‐line error message to
+ * the standard error stream and then exits. The first example below defines such a function. The second
+ * example below installs the function as the error handler.
+ *
+ *
+ *
+ *
+ *
+ *
Example: Defining a function to print a simple error message
+ *

+ * herr_t
+ * my_hdf5_error_handler(void *unused)
+ * {
+ *     fprintf (stderr, "An HDF5 error was detected. Bye.\\n");
+ *     exit (1);
+ * }
+ *
+ *         
+ *
+ * + * + * + * + * + * + *
Example: The user‐defined error handler
+ *

+ * H5Eset_auto2(H5E_DEFAULT, my_hdf5_error_handler, NULL);
+ *         
+ *
+ * + * \subsubsection subsubsec_error_ops_walk Walk through the Error Stack + * The \ref H5Eprint2 function is actually just a wrapper around the more complex \ref H5Ewalk function + * which traverses an error stack and calls a user‐defined function for each member of the stack. The example + * below shows how \ref H5Ewalk is used. + * \code + * herr_t H5Ewalk(hid_t err_stack, H5E_direction_t direction, + * H5E_walk_t func, void *client_data) + * \endcode + * The error stack err_stack is traversed and func is called for each member of the stack. Its arguments + * are an integer sequence number beginning at zero (regardless of direction) and the client_data + * pointer. If direction is \ref H5E_WALK_UPWARD, then traversal begins at the inner‐most function that + * detected the error and concludes with the API function. Use \ref H5E_WALK_DOWNWARD for the opposite + * order. + * + * \subsubsection subsubsec_error_ops_travers Traverse an Error Stack with a Callback Function + * An error stack traversal callback function takes three arguments: n is a sequence number beginning at + * zero for each traversal, eptr is a pointer to an error stack member, and client_data is the same pointer + * used in the example above passed to \ref H5Ewalk. See the example below. + * \code + * typedef herr_t (*H5E_walk_t)(unsigned n, H5E_error2_t *eptr, void *client_data) + * \endcode + * The H5E_error2_t structure is shown below. + * \code + * typedef struct { + * hid_t cls_id; + * hid_t maj_num; + * hid_t min_num; + * unsigned line; + * const char *func_name; + * const char *file_name; + * const char *desc; + * } H5E_error2_t; + * \endcode + * The maj_num and min_num are major and minor error IDs, func_name is the name of the function where + * the error was detected, file_name and line locate the error within the HDF5 Library source code, and + * desc points to a description of the error. + * + * + * + * + * + * + *
Example: A user‐defined callback function
+ *

The following example shows a user‐defined callback function. + *

+ *     \#define MSG_SIZE 64
+ *     herr_t
+ *     custom_print_cb(unsigned n, const H5E_error2_t *err_desc, void *client_data)
+ *     {
+ *         FILE *stream = (FILE *)client_data;
+ *         char maj[MSG_SIZE];
+ *         char min[MSG_SIZE];
+ *         char cls[MSG_SIZE];
+ *         const int indent = 4;
+ *
+ *         ***  Get descriptions for the major and minor error numbers  ***
+ *         if(H5Eget_class_name(err_desc->cls_id, cls, MSG_SIZE) < 0)
+ *             TEST_ERROR;
+ *         if(H5Eget_msg(err_desc->maj_num, NULL, maj, MSG_SIZE) < 0)
+ *             TEST_ERROR;
+ *         if(H5Eget_msg(err_desc->min_num, NULL, min, MSG_SIZE) < 0)
+ *             TEST_ERROR;
+ *         fprintf (stream, "%*serror #%03d: %s in %s():"
+ *                 " line %u\\n",
+ *                 indent, "", n, err_desc->file_name,
+ *                 err_desc->func_name, err_desc->line);
+ *         fprintf (stream, "%*sclass: %s\\n", indent*2, "", cls);
+ *         fprintf (stream, "%*smajor: %s\\n", indent*2, "", maj);
+ *         fprintf (stream, "%*sminor: %s\\n", indent*2, "", min);
+ *         return 0;
+ *     error:
+ *         return -1;
+ *     }
+ *         
+ *
+ * + *
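+ *
+ * As a minimal sketch (assuming the custom_print_cb callback defined above), such a callback could be
+ * passed to \ref H5Ewalk2 to traverse the default error stack, starting at the API function that failed
+ * and printing each record to the standard error stream:
+ * \code
+ * herr_t status = H5Ewalk2(H5E_DEFAULT, H5E_WALK_DOWNWARD, custom_print_cb, stderr);
+ * \endcode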

Programming Note for C++ Developers Using C Functions

+ * If a C routine that takes a function pointer as an argument is called from within C++ code, the C routine
+ * should be exited via a normal return rather than a C++ exception.
+ *
+ * Examples of this kind of routine include callbacks such as \ref H5Pset_elink_cb and
+ * \ref H5Pset_type_conv_cb and
+ * functions such as \ref H5Tconvert and \ref H5Ewalk2.
+ *
+ * Exiting the routine in its normal fashion allows the HDF5 C Library to clean up its work properly. In other
+ * words, if the C++ application jumps out of the routine back to the C++ “catch” statement, the library is
+ * not given the opportunity to close any temporary data structures that were set up when the routine was
+ * called. The C++ application should save some state as the routine is started so that any problem that
+ * occurs might be diagnosed.
+ *
+ * \subsection subsec_error_adv Advanced Error Handling Operations
+ * The section above (see \ref subsec_error_ops) discusses the basic error
+ * handling operations of the library. In that section, all the error records on the error stack are from the
+ * library itself. In this section, we are going to introduce the operations that allow an application program
+ * to push its own error records onto the error stack once it declares an error class of its own through the
+ * HDF5 Error API.
+ *
+ *
+ *
+ *
+ *
+ *
Example: An Error Report
+ *

An error report shows both the library’s error record and the application’s error records. + * See the example below. + *

+ * Error Test-DIAG: Error detected in Error Program (1.0)
+ *         thread 8192:
+ *     #000: ../../hdf5/test/error_test.c line ### in main():
+ *         Error test failed
+ *       major: Error in test
+ *       minor: Error in subroutine
+ *     #001: ../../hdf5/test/error_test.c line ### in
+ *         test_error(): H5Dwrite failed as supposed to
+ *       major: Error in IO
+ *       minor: Error in H5Dwrite
+ *   HDF5-DIAG: Error detected in HDF5 (1.10.9) thread #####:
+ *     #002: ../../hdf5/src/H5Dio.c line ### in H5Dwrite():
+ *         not a dataset
+ *       major: Invalid arguments to routine
+ *       minor: Inappropriate type
+ *         
+ *
+ * In the example above, the line above error record #002 begins with the phrase HDF5. This is the error
+ * class name of the HDF5 Library. All of the library’s error messages (major and minor) are in this default
+ * error class. The Error Test at the beginning of the line above error record #000 is the name of the
+ * application’s error class. The first two error records, #000 and #001, are from the application’s error class.
+ * By definition, an error class is a group of major and minor error messages for a library (the HDF5 Library
+ * or an application library built on top of the HDF5 Library) or an application program. The error class can
+ * be registered for a library or program through the HDF5 Error API. Major and minor messages can be defined
+ * in an error class. An application will have object handles for the error class and for major and minor
+ * messages for further operation. See the example below.
+ *
+ *
+ *
+ *
+ *
+ *
Example: The user‐defined error handler
+ *

+ * \#define MSG_SIZE 64
+ * herr_t
+ * custom_print_cb(unsigned n, const H5E_error2_t *err_desc,
+ * void* client_data)
+ * {
+ *     FILE *stream = (FILE *)client_data;
+ *     char maj[MSG_SIZE];
+ *     char min[MSG_SIZE];
+ *     char cls[MSG_SIZE];
+ *     const int indent = 4;
+ *
+ *     ***  Get descriptions for the major and minor error numbers  ***
+ *     if(H5Eget_class_name(err_desc->cls_id, cls, MSG_SIZE) < 0)
+ *         TEST_ERROR;
+ *     if(H5Eget_msg(err_desc->maj_num, NULL, maj, MSG_SIZE) < 0)
+ *         TEST_ERROR;
+ *     if(H5Eget_msg(err_desc->min_num, NULL, min, MSG_SIZE) < 0)
+ *         TEST_ERROR;
+ *     fprintf (stream, "%*serror #%03d: %s in %s():"
+ *             " line %u\\n",
+ *             indent, "", n, err_desc->file_name,
+ *             err_desc->func_name, err_desc->line);
+ *     fprintf (stream, "%*sclass: %s\\n", indent*2, "", cls);
+ *     fprintf (stream, "%*smajor: %s\\n", indent*2, "", maj);
+ *     fprintf (stream, "%*sminor: %s\\n", indent*2, "", min);
+ *     return 0;
+ * error:
+ *     return -1;
+ * }
+ *         
+ *
+ * + * \subsubsection subsubsec_error_adv_more More Error API Functions + * The Error API has functions that can be used to register or unregister an error class, to create or close + * error messages, and to query an error class or error message. These functions are illustrated below. + * + * To register an error class: + * \code + * hid_t H5Eregister_class(const char* cls_name, const char* lib_name, const char* version) + * \endcode + * This function registers an error class with the HDF5 Library so that the application library or program + * can report errors together with the HDF5 Library. + * + * To add an error message to an error class: + * \code + * hid_t H5Ecreate_msg(hid_t class, H5E_type_t msg_type, const char* mesg) + * \endcode + * This function adds an error message to an error class defined by an application library or program. The + * error message can be either major or minor which is indicated by parameter msg_type. + * + * To get the name of an error class: + * \code + * ssize_t H5Eget_class_name(hid_t class_id, char* name, size_t size) + * \endcode + * This function retrieves the name of the error class specified by the class ID. + * + * To retrieve an error message: + * \code + * ssize_t H5Eget_msg(hid_t mesg_id, H5E_type_t* mesg_type, char* mesg, size_t size) + * \endcode + * This function retrieves the error message including its length and type. + * + * To close an error message: + * \code + * herr_t H5Eclose_msg(hid_t mesg_id) + * \endcode + * This function closes an error message. + * + * To remove an error class: + * \code + * herr_t H5Eunregister_class(hid_t class_id) + * \endcode + * This function removes an error class from the Error API. + * + * + * + * + * + * + *
Example: Create an error class and error messages
+ *

The example below shows how an application creates an error class and error messages. + *

+ * ***  Create an error class  ***
+ * class_id = H5Eregister_class(ERR_CLS_NAME, PROG_NAME, PROG_VERS);
+ * ***  Retrieve class name  ***
+ * H5Eget_class_name(class_id, cls_name, cls_size);
+ * ***  Create a major error message in the class  ***
+ * maj_id = H5Ecreate_msg(class_id, H5E_MAJOR, "... ...");
+ * ***  Create a minor error message in the class  ***
+ * min_id = H5Ecreate_msg(class_id, H5E_MINOR, "... ...");
+ *         
+ *
+ * + * + * + * + * + * + *
Example: Closing error messages and unregistering the error class
+ *

The example below shows how an application closes error messages and unregisters the error + * class. + *

+ *    H5Eclose_msg(maj_id);
+ *    H5Eclose_msg(min_id);
+ *    H5Eunregister_class(class_id);
+ *         
+ *
+ * + * \subsubsection subsubsec_error_adv_app Pushing an Application Error Message onto Error Stack + * An application can push error records onto or pop error records off of the error stack just as the library + * does internally. An error stack can be registered, and an object handle can be returned to the application + * so that the application can manipulate a registered error stack. + * + * To register the current stack: + * \code + * hid_t H5Eget_current_stack(void) + * \endcode + * This function registers the current error stack, returns an object handle, and clears the current error + * stack. + * An empty error stack will also be assigned an ID. + * + * To replace the current error stack with another: + * \code + * herr_t H5Eset_current_stack(hid_t error_stack) + * \endcode + * This function replaces the current error stack with another error stack specified by error_stack and + * clears the current error stack. The object handle error_stack is closed after this function call. + * + * To push a new error record to the error stack: + * \code + * herr_t H5Epush(hid_t error_stack, const char* file, const char* func, + * unsigned line, hid_t cls_id, hid_t major_id, hid_t minor_id, + * const char* desc, ... ) + * \endcode + * This function pushes a new error record onto the error stack for the current thread. + * + * To delete some error messages: + * \code + * herr_t H5Epop(hid_t error_stack, size_t count) + * \endcode + * This function deletes some error messages from the error stack. + * + * To retrieve the number of error records: + * \code + * int H5Eget_num(hid_t error_stack) + * \endcode + * This function retrieves the number of error records from an error stack. + * + * To clear the error stack: + * \code + * herr_t H5Eclear_stack(hid_t error_stack) + * \endcode + * This function clears the error stack. + * + * To close the object handle for an error stack: + * \code + * herr_t H5Eclose_stack(hid_t error_stack) + * \endcode + * This function closes the object handle for an error stack and releases its resources. + * + * + * + * + * + * + *
Example: Pushing an error message to an error stack
+ *

The example below shows how an application pushes an error record onto the default error stack. + *

+ * ***  Make call to HDF5 I/O routine  ***
+ * if((dset_id=H5Dopen(file_id, dset_name, access_plist)) < 0)
+ * {
+ *     ***  Push client error onto error stack  ***
+ *     H5Epush(H5E_DEFAULT,__FILE__,FUNC,__LINE__,cls_id,
+ *             CLIENT_ERR_MAJ_IO,CLIENT_ERR_MINOR_OPEN, "H5Dopen failed");
+ * }
+ * ***  Indicate error occurred in function  ***
+ * return 0;
+ *         
+ *
+ * + * + * + * + * + * + *
Example: Registering the error stack
+ *

The example below shows how an application registers the current error stack and
+ * creates an object handle to prevent another HDF5 function from clearing the error stack.
+ *

+ * if (H5Dwrite(dset_id, mem_type_id, mem_space_id, file_space_id, dset_xfer_plist_id, buf) < 0)
+ * {
+ * ***  Push client error onto error stack  ***
+ *     H5Epush2(H5E_DEFAULT,__FILE__,FUNC,__LINE__,cls_id,
+ *             CLIENT_ERR_MAJ_IO,CLIENT_ERR_MINOR_HDF5,
+ *             "H5Dwrite failed");
+ * ***  Preserve the error stack by assigning an object handle to it  ***
+ *     error_stack = H5Eget_current_stack();
+ * ***  Close dataset  ***
+ *     H5Dclose(dset_id);
+ * ***  Replace the current error stack with the preserved one  ***
+ *     H5Eset_current_stack(error_stack);
+ * }
+ * return 0;
+ *         
+ *
+ * + * Previous Chapter \ref sec_attribute - Next Chapter \ref sec_plist + * + * \defgroup H5E Error Handling (H5E) * * \internal The \c FUNC_ENTER macro clears the error stack whenever an * interface function is entered. When an error is detected, an entry @@ -77,6 +605,8 @@ * error stack. The error stack is statically allocated to reduce the * complexity of handling errors within the \ref H5E package. * + * @see sec_error + * */ #endif /* H5Emodule_H */ diff --git a/src/H5Epublic.h b/src/H5Epublic.h index 0254c37..6e47d28 100644 --- a/src/H5Epublic.h +++ b/src/H5Epublic.h @@ -899,8 +899,8 @@ H5_DLL herr_t H5Ewalk1(H5E_direction_t direction, H5E_walk1_t func, void *client * * \deprecated 1.8.0 Function deprecated in this release. * - * \details Given a major error number, H5Eget_major() returns a constant - * character string that describes the error. + * \details H5Eget_major() returns a constant + * character string that describes the error, given a major error number. * * \attention This function returns a dynamically allocated string (\c char * array). An application calling this function must free the memory @@ -920,8 +920,8 @@ H5_DLL char *H5Eget_major(H5E_major_t maj); * * \deprecated 1.8.0 Function deprecated and return type changed in this release. * - * \details Given a minor error number, H5Eget_minor() returns a constant - * character string that describes the error. + * \details H5Eget_minor() returns a constant + * character string that describes the error, given a minor error number. * * \attention In the Release 1.8.x series, H5Eget_minor() returns a string of * dynamic allocated \c char array. An application calling this diff --git a/src/H5FDmpio.h b/src/H5FDmpio.h index a70f34b..ba508eb 100644 --- a/src/H5FDmpio.h +++ b/src/H5FDmpio.h @@ -104,10 +104,10 @@ H5_DLL herr_t H5Pset_fapl_mpio(hid_t fapl_id, MPI_Comm comm, MPI_Info info); * \param[out] info MPI-2 info object * \returns \herr_t * - * \details If the file access property list is set to the #H5FD_MPIO driver, - * H5Pget_fapl_mpio() returns duplicates of the stored MPI communicator + * \details H5Pget_fapl_mpio() returns duplicates of the stored MPI communicator * and Info object through the \p comm and \p info pointers, if those - * values are non-null. + * values are non-null. The file access property list must be set to the + * #H5FD_MPIO driver. * * Since the MPI communicator and Info object are duplicates of the * stored information, future modifications to the access property list diff --git a/src/H5FDsec2.h b/src/H5FDsec2.h index 541ac71..d0811ed 100644 --- a/src/H5FDsec2.h +++ b/src/H5FDsec2.h @@ -26,7 +26,21 @@ extern "C" { #endif -H5_DLL hid_t H5FD_sec2_init(void); +H5_DLL hid_t H5FD_sec2_init(void); +/** + * \ingroup FAPL + * + * \brief Sets the sec2 driver + * + * \fapl_id + * \returns \herr_t + * + * \details H5Pset_fapl_sec2() modifies the file access property list to use + * the sec2 driver, H5FDsec2. + * + * \since 1.4.0 + * + */ H5_DLL herr_t H5Pset_fapl_sec2(hid_t fapl_id); #ifdef __cplusplus diff --git a/src/H5FDstdio.h b/src/H5FDstdio.h index 9db92ed..6d1cdf3 100644 --- a/src/H5FDstdio.h +++ b/src/H5FDstdio.h @@ -38,7 +38,7 @@ H5_DLL hid_t H5FD_stdio_init(void); * \returns \herr_t * * \details H5Pset_fapl_stdio() modifies the file access property list to use - * the standard I/O driver, H5FDstdio(). + * the standard I/O driver, H5FDstdio. 
* * \since 1.4.0 * diff --git a/src/H5Fmodule.h b/src/H5Fmodule.h index 81c1ede..523d6bf 100644 --- a/src/H5Fmodule.h +++ b/src/H5Fmodule.h @@ -29,7 +29,1448 @@ #define H5_MY_PKG_ERR H5E_FILE #define H5_MY_PKG_INIT YES -/**\defgroup H5F H5F +/** \page H5F_UG The HDF5 File + * + * \section sec_file The HDF5 File + * \subsection subsec_file_intro Introduction + * The purpose of this chapter is to describe how to work with HDF5 data files. + * + * If HDF5 data is to be written to or read from a file, the file must first be explicitly created or + * opened with the appropriate file driver and access privileges. Once all work with the file is + * complete, the file must be explicitly closed. + * + * This chapter discusses the following: + * \li File access modes + * \li Creating, opening, and closing files + * \li The use of file creation property lists + * \li The use of file access property lists + * \li The use of low-level file drivers + * + * This chapter assumes an understanding of the material presented in the data model chapter. For + * more information, @see @ref sec_data_model. + * + * \subsection subsec_file_access_modes File Access Modes + * There are two issues regarding file access: + *
  • What should happen when a new file is created but a file of the same name already + * exists? Should the create action fail, or should the existing file be overwritten?
  • + *
  • Is a file to be opened with read-only or read-write access?
+ * + * Four access modes address these concerns. Two of these modes can be used with #H5Fcreate, and + * two modes can be used with #H5Fopen. + * \li #H5Fcreate accepts #H5F_ACC_EXCL or #H5F_ACC_TRUNC + * \li #H5Fopen accepts #H5F_ACC_RDONLY or #H5F_ACC_RDWR + * + * The access modes are described in the table below. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Access flags and modes
Access FlagResulting Access Mode
#H5F_ACC_EXCLIf the file already exists, #H5Fcreate fails. If the file does not exist, + * it is created and opened with read-write access. (Default)
#H5F_ACC_TRUNCIf the file already exists, the file is opened with read-write access, + * and new data will overwrite any existing data. If the file does not exist, + * it is created and opened with read-write access.
#H5F_ACC_RDONLYAn existing file is opened with read-only access. If the file does not + * exist, #H5Fopen fails. (Default)
#H5F_ACC_RDWRAn existing file is opened with read-write access. If the file does not + * exist, #H5Fopen fails.
+ * + * By default, #H5Fopen opens a file for read-only access; passing #H5F_ACC_RDWR allows + * read-write access to the file. + * + * By default, #H5Fcreate fails if the file already exists; only passing #H5F_ACC_TRUNC allows + * the truncating of an existing file. + * + * \subsection subsec_file_creation_access File Creation and File Access Properties + * File creation and file access property lists control the more complex aspects of creating and + * accessing files. + * + * File creation property lists control the characteristics of a file such as the size of the userblock, + * a user-definable data block; the size of data address parameters; properties of the B-trees that are + * used to manage the data in the file; and certain HDF5 Library versioning information. + * + * For more information, @see @ref subsubsec_file_property_lists_props. + * + * This section has a more detailed discussion of file creation properties. If you have no special + * requirements for these file characteristics, you can simply specify #H5P_DEFAULT for the default + * file creation property list when a file creation property list is called for. + * + * File access property lists control properties and means of accessing a file such as data alignment + * characteristics, metadata block and cache sizes, data sieve buffer size, garbage collection + * settings, and parallel I/O. Data alignment, metadata block and cache sizes, and data sieve buffer + * size are factors in improving I/O performance. + * + * For more information, @see @ref subsubsec_file_property_lists_access. + * + * This section has a more detailed discussion of file access properties. If you have no special + * requirements for these file access characteristics, you can simply specify #H5P_DEFAULT for the + * default file access property list when a file access property list is called for. + * + * + * + * + * + * + *
Figure 10 - UML model for an HDF5 file and its property lists
+ * \image html UML_FileAndProps.gif "UML model for an HDF5 file and its property lists" + *
+ * + * \subsection subsec_file_drivers Low-level File Drivers + * The concept of an HDF5 file is actually rather abstract: the address space for what is normally + * thought of as an HDF5 file might correspond to any of the following at the storage level: + * \li Single file on a standard file system + * \li Multiple files on a standard file system + * \li Multiple files on a parallel file system + * \li Block of memory within an application’s memory space + * \li More abstract situations such as virtual files + * + * This HDF5 address space is generally referred to as an HDF5 file regardless of its organization at + * the storage level. + * + * HDF5 accesses a file (the address space) through various types of low-level file drivers. The + * default HDF5 file storage layout is as an unbuffered permanent file which is a single, contiguous + * file on local disk. Alternative layouts are designed to suit the needs of a variety of systems, + * environments, and applications. + * + * \subsection subsec_file_program_model Programming Model for Files + * Programming models for creating, opening, and closing HDF5 files are described in the + * sub-sections below. + * + * \subsubsection subsubsec_file_program_model_create Creating a New File + * The programming model for creating a new HDF5 file can be summarized as follows: + * \li Define the file creation property list + * \li Define the file access property list + * \li Create the file + * + * First, consider the simple case where we use the default values for the property lists. See the + * example below. + * + * Creating an HDF5 file using property list defaults + * \code + * file_id = H5Fcreate ("SampleFile.h5", H5F_ACC_EXCL, H5P_DEFAULT, H5P_DEFAULT) + * \endcode + * + * Note: The example above specifies that #H5Fcreate should fail if SampleFile.h5 already exists. + * + * A more complex case is shown in the example below. In this example, we define file creation + * and access property lists (though we do not assign any properties), specify that #H5Fcreate + * should fail if SampleFile.h5 already exists, and create a new file named SampleFile.h5. The example + * does not specify a driver, so the default driver, #H5FD_SEC2, will be used. + * + * Creating an HDF5 file using property lists + * \code + * fcplist_id = H5Pcreate (H5P_FILE_CREATE) + * <...set desired file creation properties...> + * faplist_id = H5Pcreate (H5P_FILE_ACCESS) + * <...set desired file access properties...> + * file_id = H5Fcreate ("SampleFile.h5", H5F_ACC_EXCL, fcplist_id, faplist_id) + * \endcode + * Notes: + * 1. A root group is automatically created in a file when the file is first created. + * + * 2. File property lists, once defined, can be reused when another file is created within the same + * application. + * + * \subsubsection subsubsec_file_program_model_open Opening an Existing File + * The programming model for opening an existing HDF5 file can be summarized as follows: + *
  • Define or modify the file access property list including a low-level file driver (optional)
  • + *
  • Open the file
+ *
+ * The code in the example below shows how to open an existing file with read-only access.
+ *
+ * Opening an HDF5 file
+ * \code
+ * faplist_id = H5Pcreate (H5P_FILE_ACCESS)
+ * status = H5Pset_fapl_stdio (faplist_id)
+ * file_id = H5Fopen ("SampleFile.h5", H5F_ACC_RDONLY, faplist_id)
+ * \endcode
+ *
+ * \subsubsection subsubsec_file_program_model_close Closing a File
+ * The programming model for closing an HDF5 file is very simple:
+ * \li Close file
+ *
+ * We close SampleFile.h5 with the code in the example below.
+ *
+ * Closing an HDF5 file
+ * \code
+ * status = H5Fclose (file_id)
+ * \endcode
+ * Note that #H5Fclose flushes all unwritten data to storage and that file_id is the identifier returned
+ * for SampleFile.h5 by #H5Fopen.
+ *
+ * More comprehensive discussions regarding all of these steps are provided below.
+ *
+ * \subsection subsec_file_h5dump Using h5dump to View a File
+ * h5dump is a command-line utility that is included in the HDF5 distribution. This program
+ * provides a straightforward means of inspecting the contents of an HDF5 file. You can use
+ * h5dump to verify that a program is generating the intended HDF5 file. h5dump displays ASCII
+ * output formatted according to the HDF5 DDL grammar.
+ *
+ * The following h5dump command will display the contents of SampleFile.h5:
+ * \code
+ * h5dump SampleFile.h5
+ * \endcode
+ *
+ * If no datasets or groups have been created in the file and no data has been written to it, the output
+ * will look something like the following:
+ * \code
+ * HDF5 "SampleFile.h5" {
+ * GROUP "/" {
+ * }
+ * }
+ * \endcode
+ *
+ * Note that the root group, indicated above by /, was automatically created when the file was created.
+ *
+ * h5dump is described on the Tools page under Libraries and Tools Reference.
+ * The HDF5 DDL grammar is described in the document \ref DDLBNF110.
+ *
+ * \subsection subsec_file_summary File Function Summaries
+ * General library functions and macros (\ref H5), file functions (\ref H5F), file creation and file access
+ * property list functions (\ref H5P), and file driver functions (\ref H5P) are listed below.
+ *
+ *
+ *
+ *
+ *
+ *
+ *
General library functions and macros
FunctionPurpose
#H5check_versionVerifies that HDF5 library versions are consistent.
#H5closeFlushes all data to disk, closes all open identifiers, and cleans up memory.
#H5dont_atexitInstructs the library not to install the atexit cleanup routine.
#H5garbage_collectGarbage collects on all free-lists of all types.
#H5get_libversionReturns the HDF library release number.
#H5openInitializes the HDF5 library.
#H5set_free_list_limitsSets free-list size limits.
#H5_VERSION_GEDetermines whether the version of the library being used is greater than or equal + * to the specified version.
#H5_VERSION_LEDetermines whether the version of the library being used is less than or equal + * to the specified version.
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
File functions
FunctionPurpose
#H5Fclear_elink_file_cacheClears the external link open file cache for a file.
#H5FcloseCloses HDF5 file.
#H5FcreateCreates new HDF5 file.
#H5FflushFlushes data to HDF5 file on storage medium.
#H5Fget_access_plistReturns a file access property list identifier.
#H5Fget_create_plistReturns a file creation property list identifier.
#H5Fget_file_imageRetrieves a copy of the image of an existing, open file.
#H5Fget_filesizeReturns the size of an HDF5 file.
#H5Fget_freespaceReturns the amount of free space in a file.
#H5Fget_infoReturns global information for a file.
#H5Fget_intentDetermines the read/write or read-only status of a file.
#H5Fget_mdc_configObtain current metadata cache configuration for target file.
#H5Fget_mdc_hit_rateObtain target file’s metadata cache hit rate.
#H5Fget_mdc_sizeObtain current metadata cache size data for specified file.
#H5Fget_mpi_atomicityRetrieves the atomicity mode in use.
#H5Fget_nameRetrieves the name of the file to which the object belongs.
#H5Fget_obj_countReturns the number of open object identifiers for an open file.
#H5Fget_obj_idsReturns a list of open object identifiers.
#H5Fget_vfd_handleReturns pointer to the file handle from the virtual file driver.
#H5Fis_hdf5Determines whether a file is in the HDF5 format.
#H5FmountMounts a file.
#H5FopenOpens an existing HDF5 file.
#H5FreopenReturns a new identifier for a previously-opened HDF5 file.
#H5Freset_mdc_hit_rate_statsReset hit rate statistics counters for the target file.
#H5Fset_mdc_configUse to configure metadata cache of target file.
#H5Fset_mpi_atomicityUse to set the MPI atomicity mode.
#H5FunmountUnmounts a file.
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
File creation property list functions
FunctionPurpose
#H5Pset_userblock/#H5Pget_userblockSets/retrieves size of userblock.
#H5Pset_sizes/#H5Pget_sizesSets/retrieves byte size of offsets and lengths used to address objects in HDF5 file.
#H5Pset_sym_k/#H5Pget_sym_kSets/retrieves size of parameters used to control symbol table nodes.
#H5Pset_istore_k/#H5Pget_istore_kSets/retrieves size of parameter used to control B-trees for indexing chunked datasets.
#H5Pset_file_imageSets an initial file image in a memory buffer.
#H5Pget_file_imageRetrieves a copy of the file image designated as the initial content and structure of a file.
#H5Pset_shared_mesg_nindexes/#H5Pget_shared_mesg_nindexesSets or retrieves number of shared object header message indexes in file + * creation property list.
#H5Pset_shared_mesg_indexConfigures the specified shared object header message index.
#H5Pget_shared_mesg_indexRetrieves the configuration settings for a shared message index.
#H5Pset_shared_mesg_phase_change/#H5Pget_shared_mesg_phase_changeSets or retrieves shared object header message storage phase change thresholds.
#H5Pget_versionRetrieves the version information of various objects for a file creation property list.
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
File access property list functions
FunctionPurpose
#H5Pset_alignment/#H5Pget_alignmentSets/retrieves alignment properties.
#H5Pset_cache/#H5Pget_cacheSets/retrieves metadata cache and raw data chunk cache parameters.
#H5Pset_elink_file_cache_size/#H5Pget_elink_file_cache_sizeSets/retrieves the size of the external link open file cache from the specified + * file access property list.
#H5Pset_gc_references/#H5Pget_gc_referencesSets/retrieves garbage collecting references flag.
#H5Pset_family_offsetSets offset property for low-level access to a file in a family of files.
#H5Pget_family_offsetRetrieves a data offset from the file access property list.
#H5Pset_meta_block_size/#H5Pget_meta_block_sizeSets the minimum metadata blocksize or retrieves the current metadata block size setting.
#H5Pset_mdc_configSet the initial metadata cache configuration in the indicated File Access Property List + * to the supplied value.
#H5Pget_mdc_configGet the current initial metadata cache configuration from the indicated File Access + * Property List.
#H5Pset_sieve_buf_size/#H5Pget_sieve_buf_sizeSets/retrieves maximum size of data sieve buffer.
#H5Pset_libver_boundsSets bounds on library versions, and indirectly format versions, to be used + * when creating objects.
#H5Pget_libver_boundsRetrieves library version bounds settings that indirectly control the format + * versions used when creating objects.
#H5Pset_small_data_block_sizeSets the size of a contiguous block reserved for small data.
#H5Pget_small_data_block_sizeRetrieves the current small data block size setting.
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
File driver functions
FunctionPurpose
#H5Pset_driverSets a file driver.
#H5Pget_driverReturns the identifier for the driver used to create a file.
#H5Pget_driver_infoReturns a pointer to file driver information.
#H5Pset_fapl_core/#H5Pget_fapl_coreSets the driver for buffered memory files (in RAM) or retrieves information regarding + * the driver.
#H5Pset_fapl_direct/#H5Pget_fapl_directSets up use of the direct I/O driver or retrieves the direct I/O driver settings.
#H5Pset_fapl_family/#H5Pget_fapl_familySets driver for file families, designed for systems that do not support files + * larger than 2 gigabytes, or retrieves information regarding driver.
#H5Pset_fapl_logSets logging driver.
#H5Pset_fapl_mpio/#H5Pget_fapl_mpioSets driver for files on parallel file systems (MPI I/O) or retrieves information + * regarding the driver.
H5Pset_fapl_mpiposix/H5Pget_fapl_mpiposixNo longer available.
#H5Pset_fapl_multi/#H5Pget_fapl_multiSets driver for multiple files, separating categories of metadata and raw data, + * or retrieves information regarding driver.
#H5Pset_fapl_sec2Sets driver for unbuffered permanent files.
#H5Pset_fapl_splitSets driver for split files, a limited case of multiple files with one metadata file + * and one raw data file.
#H5Pset_fapl_stdioSets driver for buffered permanent files.
#H5Pset_fapl_windowsSets the Windows I/O driver.
#H5Pset_multi_typeSpecifies type of data to be accessed via the MULTI driver enabling more direct access.
#H5Pget_multi_typeRetrieves type of data property for MULTI driver.
+ * + * \subsection subsec_file_create Creating or Opening an HDF5 File + * This section describes in more detail how to create and how to open files. + * + * New HDF5 files are created and opened with #H5Fcreate; existing files are opened with + * #H5Fopen. Both functions return an object identifier which must eventually be released by calling + * #H5Fclose. + * + * To create a new file, call #H5Fcreate: + * \code + * hid_t H5Fcreate (const char *name, unsigned flags, hid_t fcpl_id, hid_t fapl_id) + * \endcode + * + * #H5Fcreate creates a new file named name in the current directory. The file is opened with read + * and write access; if the #H5F_ACC_TRUNC flag is set, any pre-existing file of the same name in + * the same directory is truncated. If #H5F_ACC_TRUNC is not set or #H5F_ACC_EXCL is set and + * if a file of the same name exists, #H5Fcreate will fail. + * + * The new file is created with the properties specified in the property lists fcpl_id and fapl_id. + * fcpl is short for file creation property list. fapl is short for file access property list. Specifying + * #H5P_DEFAULT for either the creation or access property list will use the library’s default + * creation or access properties. + * + * If #H5Fcreate successfully creates the file, it returns a file identifier for the new file. This + * identifier will be used by the application any time an object identifier, an OID, for the file is + * required. Once the application has finished working with a file, the identifier should be released + * and the file closed with #H5Fclose. + * + * To open an existing file, call #H5Fopen: + * \code + * hid_t H5Fopen (const char *name, unsigned flags, hid_t fapl_id) + * \endcode + * + * #H5Fopen opens an existing file with read-write access if #H5F_ACC_RDWR is set and read-only + * access if #H5F_ACC_RDONLY is set. + * + * fapl_id is the file access property list identifier. Alternatively, #H5P_DEFAULT indicates that the + * application relies on the default I/O access parameters. Creating and changing access property + * lists is documented further below. + * + * A file can be opened more than once via multiple #H5Fopen calls. Each such call returns a unique + * file identifier and the file can be accessed through any of these file identifiers as long as they + * remain valid. Each of these file identifiers must be released by calling #H5Fclose when it is no + * longer needed. + * + * For more information, @see @ref subsubsec_file_property_lists_access. + * For more information, @see @ref subsec_file_property_lists. + * + * \subsection subsec_file_closes Closing an HDF5 File + * #H5Fclose both closes a file and releases the file identifier returned by #H5Fopen or #H5Fcreate. + * #H5Fclose must be called when an application is done working with a file; while the HDF5 + * Library makes every effort to maintain file integrity, failure to call #H5Fclose may result in the + * file being abandoned in an incomplete or corrupted state. + * + * To close a file, call #H5Fclose: + * \code + * herr_t H5Fclose (hid_t file_id) + * \endcode + * This function releases resources associated with an open file. After closing a file, the file + * identifier, file_id, cannot be used again as it will be undefined. + * + * #H5Fclose fulfills three purposes: to ensure that the file is left in an uncorrupted state, to ensure + * that all data has been written to the file, and to release resources. Use #H5Fflush if you wish to + * ensure that all data has been written to the file but it is premature to close it. 
+ * + * Note regarding serial mode behavior: When #H5Fclose is called in serial mode, it closes the file + * and terminates new access to it, but it does not terminate access to objects that remain + * individually open within the file. That is, if #H5Fclose is called for a file but one or more objects + * within the file remain open, those objects will remain accessible until they are individually + * closed. To illustrate, assume that a file, fileA, contains a dataset, data_setA, and that both are + * open when #H5Fclose is called for fileA. data_setA will remain open and accessible, including + * writable, until it is explicitly closed. The file will be automatically and finally closed once all + * objects within it have been closed. + * + * Note regarding parallel mode behavior: Once #H5Fclose has been called in parallel mode, access + * is no longer available to any object within the file. + * + * \subsection subsec_file_property_lists File Property Lists + * Additional information regarding file structure and access are passed to #H5Fcreate and + * #H5Fopen through property list objects. Property lists provide a portable and extensible method of + * modifying file properties via simple API functions. There are two kinds of file-related property + * lists: + * \li File creation property lists + * \li File access property lists + * + * In the following sub-sections, we discuss only one file creation property, userblock size, in detail + * as a model for the user. Other file creation and file access properties are mentioned and defined + * briefly, but the model is not expanded for each; complete syntax, parameter, and usage + * information for every property list function is provided in the \ref H5P + * section of the HDF5 Reference Manual. + * + * For more information, @see @ref sec_plist. + * + * \subsubsection subsubsec_file_property_lists_create Creating a Property List + * If you do not wish to rely on the default file creation and access properties, you must first create + * a property list with #H5Pcreate. + * \code + * hid_t H5Pcreate (hid_t cls_id) + * \endcode + * cls_id is the type of property list being created. In this case, the appropriate values are + * #H5P_FILE_CREATE for a file creation property list and #H5P_FILE_ACCESS for a file access + * property list. + * + * Thus, the following calls create a file creation property list and a file access property list with + * identifiers fcpl_id and fapl_id, respectively: + * \code + * fcpl_id = H5Pcreate (H5P_FILE_CREATE) + * fapl_id = H5Pcreate (H5P_FILE_ACCESS) + * \endcode + * + * Once the property lists have been created, the properties themselves can be modified via the + * functions described in the following sub-sections. + * + * \subsubsection subsubsec_file_property_lists_props File Creation Properties + * File creation property lists control the file metadata, which is maintained in the superblock of the + * file. These properties are used only when a file is first created. + * + *

Userblock Size

+ * \code + * herr_t H5Pset_userblock (hid_t plist, hsize_t size) + * herr_t H5Pget_userblock (hid_t plist, hsize_t *size) + * \endcode + * + * The userblock is a fixed-length block of data located at the beginning of the file and is ignored + * by the HDF5 library. This block is specifically set aside for any data or information that + * developers determine to be useful to their applications but that will not be used by the HDF5 + * library. The size of the userblock is defined in bytes and may be set to any power of two with a + * minimum size of 512 bytes. In other words, userblocks might be 512, 1024, or 2048 bytes in + * size. + * + * This property is set with #H5Pset_userblock and queried via #H5Pget_userblock. For example, if + * an application needed a 4K userblock, then the following function call could be used: + * \code + * status = H5Pset_userblock(fcpl_id, 4096) + * \endcode + * + * The property list could later be queried with: + * \code + * status = H5Pget_userblock(fcpl_id, size) + * \endcode + * and the value 4096 would be returned in the parameter size. + * + * Other properties, described below, are set and queried in exactly the same manner. Syntax and + * usage are detailed in the @ref H5P section of the HDF5 Reference Manual. + * + *

Offset and Length Sizes

+ * This property specifies the number of bytes used to store the offset and length of objects in the + * HDF5 file. Values of 2, 4, and 8 bytes are currently supported to accommodate 16-bit, 32-bit, + * and 64-bit file address spaces. + * + * These properties are set and queried via #H5Pset_sizes and #H5Pget_sizes. + * + *

Symbol Table Parameters

+ * The size of symbol table B-trees can be controlled by setting the 1/2-rank and 1/2-node size + * parameters of the B-tree. + * + * These properties are set and queried via #H5Pset_sym_k and #H5Pget_sym_k + * + *

Indexed Storage Parameters

+ * The size of indexed storage B-trees can be controlled by setting the 1/2-rank and 1/2-node size + * parameters of the B-tree. + * + * These properties are set and queried via #H5Pset_istore_k and #H5Pget_istore_k. + * + *

Version Information

+ * Various objects in an HDF5 file may over time appear in different versions. The HDF5 Library + * keeps track of the version of each object in the file. + * + * Version information is retrieved via #H5Pget_version. + * + * \subsubsection subsubsec_file_property_lists_access File Access Properties + * This section discusses file access properties that are not related to the low-level file drivers. File + * drivers are discussed separately later in this chapter. + * For more information, @see @ref subsec_file_alternate_drivers. + * + * File access property lists control various aspects of file I/O and structure. + * + *

Data Alignment

+ * Sometimes file access is faster if certain data elements are aligned in a specific manner. This can + * be controlled by setting alignment properties via the #H5Pset_alignment function. There are two + * values involved: + * \li A threshold value + * \li An alignment interval + * + * Any allocation request at least as large as the threshold will be aligned on an address that is a + * multiple of the alignment interval. + * + *
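+ *
+ * For example, the following sketch (the threshold and alignment values are arbitrary illustrations)
+ * requests that any file allocation of 1 MiB or more be aligned on a 4 KiB boundary:
+ * \code
+ * hid_t fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+ *
+ * // Align every allocation of at least 1 MiB on a 4 KiB boundary.
+ * H5Pset_alignment(fapl_id, 1024 * 1024, 4096);
+ *
+ * hid_t file_id = H5Fcreate("aligned.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ * \endcode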

Metadata Block Allocation Size

+ * Metadata typically exists as very small chunks of data; storing metadata elements in a file + * without blocking them can result in hundreds or thousands of very small data elements in the + * file. This can result in a highly fragmented file and seriously impede I/O. By blocking metadata + * elements, these small elements can be grouped in larger sets, thus alleviating both problems. + * + * #H5Pset_meta_block_size sets the minimum size in bytes of metadata block allocations. + * #H5Pget_meta_block_size retrieves the current minimum metadata block allocation size. + * + *
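+ *
+ * A minimal sketch, with an illustrative 16 KiB block size, might look like this:
+ * \code
+ * hid_t   fapl_id         = H5Pcreate(H5P_FILE_ACCESS);
+ * hsize_t meta_block_size = 0;
+ *
+ * // Aggregate metadata into blocks of at least 16 KiB.
+ * H5Pset_meta_block_size(fapl_id, 16 * 1024);
+ *
+ * // Read the setting back.
+ * H5Pget_meta_block_size(fapl_id, &meta_block_size);
+ * \endcode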

Metadata Cache

+ * Metadata and raw data I/O speed are often governed by the size and frequency of disk reads and + * writes. In many cases, the speed can be substantially improved by the use of an appropriate + * cache. + * + * #H5Pset_cache sets the minimum cache size for both metadata and raw data and a preemption + * value for raw data chunks. #H5Pget_cache retrieves the current values. + * + *
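+ *
+ * The sketch below sets an illustrative raw data chunk cache of 12421 hash slots and 16 MiB with a
+ * preemption value of 0.75; in recent versions of the library the metadata-cache element count
+ * argument is ignored, so it is simply passed as 0 here.
+ * \code
+ * hid_t fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+ *
+ * // 12421 hash slots and 16 MiB for the raw data chunk cache;
+ * // prefer to evict fully read or written chunks first (w0 = 0.75).
+ * H5Pset_cache(fapl_id, 0, 12421, 16 * 1024 * 1024, 0.75);
+ * \endcode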

Data Sieve Buffer Size

+ * Data sieve buffering is used by certain file drivers to speed data I/O and is most commonly used when
+ * working with dataset hyperslabs. For example, using a buffer large enough to hold several pieces
+ * of a dataset as it is read in for hyperslab selections will boost performance noticeably.
+ *
+ * #H5Pset_sieve_buf_size sets the maximum size in bytes of the data sieve buffer.
+ * #H5Pget_sieve_buf_size retrieves the current maximum size of the data sieve buffer.
+ *
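+ * For instance, a 256 KiB sieve buffer (an arbitrary illustrative size) could be requested and
+ * queried as follows:
+ * \code
+ * hid_t  fapl_id    = H5Pcreate(H5P_FILE_ACCESS);
+ * size_t sieve_size = 0;
+ *
+ * H5Pset_sieve_buf_size(fapl_id, 256 * 1024);
+ * H5Pget_sieve_buf_size(fapl_id, &sieve_size);  // sieve_size is now 262144
+ * \endcode
+ *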

Garbage Collection References

+ * Dataset region references and other reference types use space in an HDF5 file’s global heap. If + * garbage collection is on (1) and the user passes in an uninitialized value in a reference structure, + * the heap might become corrupted. When garbage collection is off (0), however, and the user reuses + * a reference, the previous heap block will be orphaned and not returned to the free heap + * space. When garbage collection is on, the user must initialize the reference structures to 0 or risk + * heap corruption. + * + * #H5Pset_gc_references sets the garbage collecting references flag. + * + * \subsection subsec_file_alternate_drivers Alternate File Storage Layouts and Low-level File Drivers + * The concept of an HDF5 file is actually rather abstract: the address space for what is normally + * thought of as an HDF5 file might correspond to any of the following: + * \li Single file on standard file system + * \li Multiple files on standard file system + * \li Multiple files on parallel file system + * \li Block of memory within application’s memory space + * \li More abstract situations such as virtual files + * + * This HDF5 address space is generally referred to as an HDF5 file regardless of its organization at + * the storage level. + * + * HDF5 employs an extremely flexible mechanism called the virtual file layer, or VFL, for file + * I/O. A full understanding of the VFL is only necessary if you plan to write your own drivers + * @see \ref VFL in the HDF5 Technical Notes. + * + * For our + * purposes here, it is sufficient to know that the low-level drivers used for file I/O reside in the + * VFL, as illustrated in the following figure. Note that H5FD_STREAM is not available with 1.8.x + * and later versions of the library. + * + * + * + * + * + *
+ * \image html VFL_Drivers.gif "I/O path from application to VFL and low-level drivers to storage" + *
+ * + * As mentioned above, HDF5 applications access HDF5 files through various low-level file + * drivers. The default driver for that layout is the POSIX driver (also known as the SEC2 driver), + * #H5FD_SEC2. Alternative layouts and drivers are designed to suit the needs of a variety of + * systems, environments, and applications. The drivers are listed in the table below. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
+ * <table>
+ * <caption>Supported file drivers</caption>
+ * <tr>
+ * <th>Driver Name</th><th>Driver Identifier</th><th>Description</th><th>Related API</th>
+ * </tr>
+ * <tr><td>POSIX</td><td>#H5FD_SEC2</td>
+ * <td>This driver uses POSIX file-system functions like read and write to perform I/O to a single,
+ * permanent file on local disk with no system buffering. This driver is POSIX-compliant and is
+ * the default file driver for all systems.</td><td>#H5Pset_fapl_sec2</td></tr>
+ * <tr><td>Direct</td><td>#H5FD_DIRECT</td>
+ * <td>This is the #H5FD_SEC2 driver except data is written to or read from the file
+ * synchronously without being cached by the system.</td><td>#H5Pset_fapl_direct</td></tr>
+ * <tr><td>Log</td><td>#H5FD_LOG</td>
+ * <td>This is the #H5FD_SEC2 driver with logging capabilities.</td><td>#H5Pset_fapl_log</td></tr>
+ * <tr><td>Windows</td><td>#H5FD_WINDOWS</td>
+ * <td>This driver was modified in HDF5-1.8.8 to be a wrapper of the POSIX driver,
+ * #H5FD_SEC2. This change should not affect user applications.</td><td>#H5Pset_fapl_windows</td></tr>
+ * <tr><td>STDIO</td><td>#H5FD_STDIO</td>
+ * <td>This driver uses functions from the standard C stdio.h to perform I/O
+ * to a single, permanent file on local disk with additional system buffering.</td><td>#H5Pset_fapl_stdio</td></tr>
+ * <tr><td>Memory</td><td>#H5FD_CORE</td>
+ * <td>With this driver, an application can work with a file in memory for faster reads and
+ * writes. File contents are kept in memory until the file is closed. At closing, the memory
+ * version of the file can be written back to disk or abandoned.</td><td>#H5Pset_fapl_core</td></tr>
+ * <tr><td>Family</td><td>#H5FD_FAMILY</td>
+ * <td>With this driver, the HDF5 file’s address space is partitioned into pieces and sent to
+ * separate storage files using an underlying driver of the user’s choice. This driver is for
+ * systems that do not support files larger than 2 gigabytes.</td><td>#H5Pset_fapl_family</td></tr>
+ * <tr><td>Multi</td><td>#H5FD_MULTI</td>
+ * <td>With this driver, data can be stored in multiple files according to the type of the data.
+ * I/O might work better if data is stored in separate files based on the type of data. The Split
+ * driver is a special case of this driver.</td><td>#H5Pset_fapl_multi</td></tr>
+ * <tr><td>Split</td><td>H5FD_SPLIT</td>
+ * <td>This file driver splits a file into two parts. One part stores metadata, and the other part
+ * stores raw data. Splitting a file into two parts in this way is a limited case of the Multi
+ * driver.</td><td>#H5Pset_fapl_split</td></tr>
+ * <tr><td>Parallel</td><td>#H5FD_MPIO</td>
+ * <td>This is the standard HDF5 file driver for parallel file systems. This driver uses the MPI
+ * standard for both communication and file I/O.</td><td>#H5Pset_fapl_mpio</td></tr>
+ * <tr><td>Parallel POSIX</td><td>H5FD_MPIPOSIX</td>
+ * <td>This driver is no longer available.</td><td></td></tr>
+ * <tr><td>Stream</td><td>H5FD_STREAM</td>
+ * <td>This driver is no longer available.</td><td></td></tr>
+ * </table>
+ * + * For more information, see the HDF5 Reference Manual entries for the function calls shown in + * the column on the right in the table above. + * + * Note that the low-level file drivers manage alternative file storage layouts. Dataset storage + * layouts (chunking, compression, and external dataset storage) are managed independently of file + * storage layouts. + * + * If an application requires a special-purpose low-level driver, the VFL provides a public API for + * creating one. For more information on how to create a driver, + * @see @ref VFL in the HDF5 Technical Notes. + * + * \subsubsection subsubsec_file_alternate_drivers_id Identifying the Previously‐used File Driver + * When creating a new HDF5 file, no history exists, so the file driver must be specified if it is to be + * other than the default. + * + * When opening existing files, however, the application may need to determine which low-level + * driver was used to create the file. The function #H5Pget_driver is used for this purpose. See the + * example below. + * + * Identifying a driver + * \code + * hid_t H5Pget_driver (hid_t fapl_id) + * \endcode + * + * #H5Pget_driver returns a constant identifying the low-level driver for the access property list + * fapl_id. For example, if the file was created with the POSIX (aka SEC2) driver, + * #H5Pget_driver returns #H5FD_SEC2. + * + * If the application opens an HDF5 file without both determining the driver used to create the file + * and setting up the use of that driver, the HDF5 Library will examine the superblock and the + * driver definition block to identify the driver. + * See the HDF5 File Format Specification + * for detailed descriptions of the superblock and the driver definition block. + * + * \subsubsection subsubsec_file_alternate_drivers_sec2 The POSIX (aka SEC2) Driver + * The POSIX driver, #H5FD_SEC2, uses functions from section 2 of the POSIX manual to access + * unbuffered files stored on a local file system. This driver is also known as the SEC2 driver. The + * HDF5 Library buffers metadata regardless of the low-level driver, but using this driver prevents + * data from being buffered again by the lowest layers of the library. + * + * The function #H5Pset_fapl_sec2 sets the file access properties to use the POSIX driver. See the + * example below. + * + * Using the POSIX, aka SEC2, driver + * \code + * herr_t H5Pset_fapl_sec2 (hid_t fapl_id) + * \endcode + * + * Any previously-defined driver properties are erased from the property list. + * + * Additional parameters may be added to this function in the future. Since there are no additional + * variable settings associated with the POSIX driver, there is no H5Pget_fapl_sec2 function. + * + * \subsubsection subsubsec_file_alternate_drivers_direct The Direct Driver + * The Direct driver, #H5FD_DIRECT, functions like the POSIX driver except that data is written to + * or read from the file synchronously without being cached by the system. + * + * The functions #H5Pset_fapl_direct and #H5Pget_fapl_direct are used to manage file access properties. + * See the example below. + * + * Using the Direct driver + * \code + * herr_t H5Pset_fapl_direct(hid_t fapl_id, size_t alignment, size_t block_size, size_t cbuf_size) + * herr_t H5Pget_fapl_direct(hid_t fapl_id, size_t *alignment, size_t *block_size, size_t *cbuf_size) + * \endcode + * + * #H5Pset_fapl_direct sets the file access properties to use the Direct driver; any previously defined + * driver properties are erased from the property list. 
#H5Pget_fapl_direct retrieves the file access + * properties used with the Direct driver. fapl_id is the file access property list identifier. + * alignment is the memory alignment boundary. block_size is the file system block size. + * cbuf_size is the copy buffer size. + * + * Additional parameters may be added to this function in the future. + * + * \subsubsection subsubsec_file_alternate_drivers_log The Log Driver + * The Log driver, #H5FD_LOG, is designed for situations where it is necessary to log file access + * activity. + * + * The function #H5Pset_fapl_log is used to manage logging properties. See the example below. + * + * Logging file access + * \code + * herr_t H5Pset_fapl_log (hid_t fapl_id, const char *logfile, unsigned int flags, size_t buf_size) + * \endcode + * + * #H5Pset_fapl_log sets the file access property list to use the Log driver. File access characteristics + * are identical to access via the POSIX driver. Any previously defined driver properties are erased + * from the property list. + * + * Log records are written to the file logfile. + * + * The logging levels set with the verbosity parameter are shown in the table below. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
+ * <table>
+ * <caption>Logging levels</caption>
+ * <tr><th>Level</th><th>Comments</th></tr>
+ * <tr><td>0</td><td>Performs no logging.</td></tr>
+ * <tr><td>1</td><td>Records where writes and reads occur in the file.</td></tr>
+ * <tr><td>2</td><td>Records where writes and reads occur in the file and what kind of data is written
+ * at each location. This includes raw data or any of several types of metadata
+ * (object headers, superblock, B-tree data, local headers, or global headers).</td></tr>
+ * </table>
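+ *
+ * As a sketch, the following sets up a file access property list that logs where reads and writes
+ * occur to a hypothetical log file named access.log with a 4 KiB logging buffer; the flag macros,
+ * such as #H5FD_LOG_LOC_IO used here, are defined in H5FDlog.h.
+ * \code
+ * hid_t fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+ *
+ * // Log where reads, writes, and seeks occur; "access.log" is an arbitrary log file name.
+ * H5Pset_fapl_log(fapl_id, "access.log", H5FD_LOG_LOC_IO, 4096);
+ *
+ * hid_t file_id = H5Fcreate("logged.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ * \endcode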
+ * + * There is no H5Pget_fapl_log function. + * + * Additional parameters may be added to this function in the future. + * + * \subsubsection subsubsec_file_alternate_drivers_win The Windows Driver + * The Windows driver, #H5FD_WINDOWS, was modified in HDF5-1.8.8 to be a wrapper of the + * POSIX driver, #H5FD_SEC2. In other words, if the Windows drivers is used, any file I/O will + * instead use the functionality of the POSIX driver. This change should be transparent to all user + * applications. The Windows driver used to be the default driver for Windows systems. The + * POSIX driver is now the default. + * + * The function #H5Pset_fapl_windows sets the file access properties to use the Windows driver. + * See the example below. + * + * Using the Windows driver + * \code + * herr_t H5Pset_fapl_windows (hid_t fapl_id) + * \endcode + * + * Any previously-defined driver properties are erased from the property list. + * + * Additional parameters may be added to this function in the future. Since there are no additional + * variable settings associated with the POSIX driver, there is no H5Pget_fapl_windows function. + * + * \subsubsection subsubsec_file_alternate_drivers_stdio The STDIO Driver + * The STDIO driver, #H5FD_STDIO, accesses permanent files in a local file system like the + * POSIX driver does. The STDIO driver also has an additional layer of buffering beneath the + * HDF5 Library. + * + * The function #H5Pset_fapl_stdio sets the file access properties to use the STDIO driver. See the + * example below. + * + * Using the STDIO driver + * \code + * herr_t H5Pset_fapl_stdio (hid_t fapl_id) + * \endcode + * + * Any previously defined driver properties are erased from the property list. + * + * Additional parameters may be added to this function in the future. Since there are no additional + * variable settings associated with the STDIO driver, there is no H5Pget_fapl_stdio function. + * + * \subsubsection subsubsec_file_alternate_drivers_mem The Memory (aka Core) Driver + * There are several situations in which it is reasonable, sometimes even required, to maintain a file + * entirely in system memory. You might want to do so if, for example, either of the following + * conditions apply: + *
+ * \li Performance requirements are so stringent that disk latency is a limiting factor
+ * \li You are working with small, temporary files that will not be retained and, thus,
+ *     need not be written to storage media
+ * + * The Memory driver, #H5FD_CORE, provides a mechanism for creating and managing such in memory files. + * The functions #H5Pset_fapl_core and #H5Pget_fapl_core manage file access + * properties. See the example below. + * + * Managing file access for in-memory files + * \code + * herr_t H5Pset_fapl_core (hid_t access_properties, size_t block_size, hbool_t backing_store) + * herr_t H5Pget_fapl_core (hid_t access_properties, size_t *block_size), hbool_t *backing_store) + * \endcode + * + * #H5Pset_fapl_core sets the file access property list to use the Memory driver; any previously + * defined driver properties are erased from the property list. + * + * Memory for the file will always be allocated in units of the specified block_size. + * + * The backing_store Boolean flag is set when the in-memory file is created. + * backing_store indicates whether to write the file contents to disk when the file is closed. If + * backing_store is set to 1 (TRUE), the file contents are flushed to a file with the same name as the + * in-memory file when the file is closed or access to the file is terminated in memory. If + * backing_store is set to 0 (FALSE), the file is not saved. + * + * The application is allowed to open an existing file with the #H5FD_CORE driver. While using + * #H5Fopen to open an existing file, if backing_store is set to 1 and the flag for #H5Fopen is set to + * #H5F_ACC_RDWR, changes to the file contents will be saved to the file when the file is closed. + * If backing_store is set to 0 and the flag for #H5Fopen is set to #H5F_ACC_RDWR, changes to the + * file contents will be lost when the file is closed. If the flag for #H5Fopen is set to + * #H5F_ACC_RDONLY, no change to the file will be allowed either in memory or on file. + * + * If the file access property list is set to use the Memory driver, #H5Pget_fapl_core will return + * block_size and backing_store with the relevant file access property settings. + * + * Note the following important points regarding in-memory files: + *
+ * \li Local temporary files are created and accessed directly from memory without ever
+ *     being written to disk
+ * \li Total file size must not exceed the available virtual memory
+ * \li Only one HDF5 file identifier can be opened for the file, the identifier returned by
+ *     #H5Fcreate or #H5Fopen
+ * \li The changes to the file will be discarded when access is terminated unless
+ *     backing_store is set to 1
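+ *
+ * A minimal sketch of creating an in-memory file whose contents are saved to disk when it is
+ * closed follows; the 64 KiB allocation increment and the file name are only illustrative.
+ * \code
+ * hid_t fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+ *
+ * // Grow the in-memory image in 64 KiB increments and write it to
+ * // "core.h5" when the file is closed (backing_store = 1).
+ * H5Pset_fapl_core(fapl_id, 64 * 1024, 1);
+ *
+ * hid_t file_id = H5Fcreate("core.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ * \endcode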
+ * + * Additional parameters may be added to these functions in the future. + * + * @see + * HDF5 File Image Operations + * section for information on more advanced usage of the Memory file driver, and + * @see + * Modified Region Writes + * section for information on how to set write operations so that only modified regions are written + * to storage. + * + * \subsubsection subsubsec_file_alternate_drivers_family The Family Driver + * HDF5 files can become quite large, and this can create problems on systems that do not support + * files larger than 2 gigabytes. The HDF5 file family mechanism is designed to solve the problems + * this creates by splitting the HDF5 file address space across several smaller files. This structure + * does not affect how metadata and raw data are stored: they are mixed in the address space just as + * they would be in a single, contiguous file. + * + * HDF5 applications access a family of files via the Family driver, #H5FD_FAMILY. The + * functions #H5Pset_fapl_family and #H5Pget_fapl_family are used to manage file family + * properties. See the example below. + * + * Managing file family properties + * \code + * herr_t H5Pset_fapl_family (hid_t fapl_id, + * hsize_t memb_size, hid_t member_properties) + * herr_t H5Pget_fapl_family (hid_t fapl_id, + * hsize_t *memb_size, hid_t *member_properties) + * \endcode + * + * Each member of the family is the same logical size though the size and disk storage reported by + * file system listing tools may be substantially smaller. Examples of file system listing tools are + * \code + * ls -l + * \endcode + * on a Unix system or the detailed folder listing on an Apple or Microsoft Windows + * system. The name passed to #H5Fcreate or #H5Fopen should include a printf(3c)-style integer + * format specifier which will be replaced with the family member number. The first family + * member is numbered zero (0). + * + * #H5Pset_fapl_family sets the access properties to use the Family driver; any previously defined + * driver properties are erased from the property list. member_properties will serve as the file + * access property list for each member of the file family. memb_size specifies the logical size, in + * bytes, of each family member. memb_size is used only when creating a new file or truncating an + * existing file; otherwise the member size is determined by the size of the first member of the + * family being opened. Note: If the size of the off_t type is four bytes, the maximum family + * member size is usually 2^31-1 because the byte at offset 2,147,483,647 is generally inaccessible. + * + * #H5Pget_fapl_family is used to retrieve file family properties. If the file access property list is set + * to use the Family driver, member_properties will be returned with a pointer to a copy of the + * appropriate member access property list. If memb_size is non-null, it will contain the logical + * size, in bytes, of family members. + * + * Additional parameters may be added to these functions in the future. + * + *
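+ *
+ * For example, the following sketch (the member size and name pattern are chosen only for
+ * illustration) creates a family whose members hold at most 100 MiB each and are named
+ * data_00000.h5, data_00001.h5, and so on:
+ * \code
+ * hid_t fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+ *
+ * // Each family member holds at most 100 MiB of the HDF5 address space.
+ * H5Pset_fapl_family(fapl_id, (hsize_t)100 * 1024 * 1024, H5P_DEFAULT);
+ *
+ * // The %05d in the name is replaced with the member number (0, 1, 2, ...).
+ * hid_t file_id = H5Fcreate("data_%05d.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ * \endcode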

Unix Tools and an HDF5 Utility

+ * It occasionally becomes necessary to repartition a file family. A command-line utility for this + * purpose, h5repart, is distributed with the HDF5 library. + * + * \code + * h5repart [-v] [-b block_size[suffix]] [-m member_size[suffix]] source destination + * \endcode + * + * h5repart repartitions an HDF5 file by copying the source file or file family to the destination file + * or file family, preserving holes in the underlying UNIX files. Families are used for the source + * and/or destination if the name includes a printf-style integer format such as %d. The -v switch + * prints input and output file names on the standard error stream for progress monitoring, -b sets + * the I/O block size (the default is 1KB), and -m sets the output member size if the destination is a + * family name (the default is 1GB). block_size and member_size may be suffixed with the letters + * g, m, or k for GB, MB, or KB respectively. + * + * The h5repart utility is described on the Tools page of the HDF5 Reference Manual. + * + * An existing HDF5 file can be split into a family of files by running the file through split(1) on a + * UNIX system and numbering the output files. However, the HDF5 Library is lazy about + * extending the size of family members, so a valid file cannot generally be created by + * concatenation of the family members. + * + * Splitting the file and rejoining the segments by concatenation (split(1) and cat(1) on UNIX + * systems) does not generate files with holes; holes are preserved only through the use of h5repart. + * + * \subsubsection subsubsec_file_alternate_drivers_multi The Multi Driver + * In some circumstances, it is useful to separate metadata from raw data and some types of + * metadata from other types of metadata. Situations that would benefit from use of the Multi driver + * include the following: + *
+ * \li In networked situations where the small metadata files can be kept on local disks but
+ *     larger raw data files must be stored on remote media
+ * \li In cases where the raw data is extremely large
+ * \li In situations requiring frequent access to metadata held in RAM while the raw data
+ *     can be efficiently held on disk
+ * + * In either case, access to the metadata is substantially easier with the smaller, and possibly more + * localized, metadata files. This often results in improved application performance. + * + * The Multi driver, #H5FD_MULTI, provides a mechanism for segregating raw data and different + * types of metadata into multiple files. The functions #H5Pset_fapl_multi and + * #H5Pget_fapl_multi are used to manage access properties for these multiple files. See the example + * below. + * + * Managing access properties for multiple files + * \code + * herr_t H5Pset_fapl_multi (hid_t fapl_id, const H5FD_mem_t *memb_map, const hid_t *memb_fapl, + * const char * const *memb_name, const haddr_t *memb_addr, + * hbool_t relax) + * herr_t H5Pget_fapl_multi (hid_t fapl_id, const H5FD_mem_t *memb_map, const hid_t *memb_fapl, + * const char **memb_name, const haddr_t *memb_addr, hbool_t *relax) + * \endcode + * + * #H5Pset_fapl_multi sets the file access properties to use the Multi driver; any previously defined + * driver properties are erased from the property list. With the Multi driver invoked, the application + * will provide a base name to #H5Fopen or #H5Fcreate. The files will be named by that base name as + * modified by the rule indicated in memb_name. File access will be governed by the file access + * property list memb_properties. + * + * See #H5Pset_fapl_multi and #H5Pget_fapl_multi in the HDF5 Reference Manual for descriptions + * of these functions and their usage. + * + * Additional parameters may be added to these functions in the future. + * + * \subsubsection subsubsec_file_alternate_drivers_split The Split Driver + * The Split driver, H5FD_SPLIT, is a limited case of the Multi driver where only two files are + * created. One file holds metadata, and the other file holds raw data. + * The function #H5Pset_fapl_split is used to manage Split file access properties. See the example + * below. + * + * Managing access properties for split files + * \code + * herr_t H5Pset_fapl_split (hid_t access_properties, const char *meta_extension, + * hid_t meta_properties,const char *raw_extension, hid_t raw_properties) + * \endcode + * + * #H5Pset_fapl_split sets the file access properties to use the Split driver; any previously defined + * driver properties are erased from the property list. + * + * With the Split driver invoked, the application will provide a base file name such as file_name to + * #H5Fcreate or #H5Fopen. The metadata and raw data files in storage will then be named + * file_name.meta_extension and file_name.raw_extension, respectively. For example, if + * meta_extension is defined as .meta and raw_extension is defined as .raw, the final filenames will + * be file_name.meta and file_name.raw. + * + * Each file can have its own file access property list. This allows the creative use of other lowlevel + * file drivers. For instance, the metadata file can be held in RAM and accessed via the + * Memory driver while the raw data file is stored on disk and accessed via the POSIX driver. + * Metadata file access will be governed by the file access property list in meta_properties. Raw + * data file access will be governed by the file access property list in raw_properties. + * + * Additional parameters may be added to these functions in the future. Since there are no + * additional variable settings associated with the Split driver, there is no H5Pget_fapl_split + * function. 
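+ *
+ * Continuing the example in the text, the following sketch (the extensions and base name are
+ * illustrative) produces the files file_name.meta and file_name.raw:
+ * \code
+ * hid_t fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+ *
+ * // Metadata goes to "<base>.meta" and raw data to "<base>.raw",
+ * // each accessed with default file access properties.
+ * H5Pset_fapl_split(fapl_id, ".meta", H5P_DEFAULT, ".raw", H5P_DEFAULT);
+ *
+ * hid_t file_id = H5Fcreate("file_name", H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ * \endcode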
+ * + * \subsubsection subsubsec_file_alternate_drivers_par The Parallel Driver + * Parallel environments require a parallel low-level driver. HDF5’s default driver for parallel + * systems is called the Parallel driver, #H5FD_MPIO. This driver uses the MPI standard for both + * communication and file I/O. + * + * The functions #H5Pset_fapl_mpio and #H5Pget_fapl_mpio are used to manage file access + * properties for the #H5FD_MPIO driver. See the example below. + * + * Managing parallel file access properties + * \code + * herr_t H5Pset_fapl_mpio (hid_t fapl_id, MPI_Comm comm, MPI_info info) + * herr_t H5Pget_fapl_mpio (hid_t fapl_id, MPI_Comm *comm, MPI_info *info) + * \endcode + * + * The file access properties managed by #H5Pset_fapl_mpio and retrieved by + * #H5Pget_fapl_mpio are the MPI communicator, comm, and the MPI info object, info. comm and + * info are used for file open. info is an information object much like an HDF5 property list. Both + * are defined in MPI_FILE_OPEN of MPI-2. + * + * The communicator and the info object are saved in the file access property list fapl_id. + * fapl_id can then be passed to MPI_FILE_OPEN to create and/or open the file. + * + * #H5Pset_fapl_mpio and #H5Pget_fapl_mpio are available only in the parallel HDF5 Library and + * are not collective functions. The Parallel driver is available only in the parallel HDF5 Library. + * + * Additional parameters may be added to these functions in the future. + * + * \subsection subsec_file_examples Code Examples for Opening and Closing Files + * \subsubsection subsubsec_file_examples_trunc Example Using the H5F_ACC_TRUNC Flag + * The following example uses the #H5F_ACC_TRUNC flag when it creates a new file. The default + * file creation and file access properties are also used. Using #H5F_ACC_TRUNC means the + * function will look for an existing file with the name specified by the function. In this case, that + * name is FILE. If the function does not find an existing file, it will create one. If it does find an + * existing file, it will empty the file in preparation for a new set of data. The identifier for the + * "new" file will be passed back to the application program. + * For more information, @see @ref subsec_file_access_modes. + * + * Creating a file with default creation and access properties + * \code + * hid_t file; // identifier + * + * // Create a new file using H5F_ACC_TRUNC access, default + * // file creation properties, and default file access + * // properties. + * file = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); + * + * // Close the file. + * status = H5Fclose(file); + * \endcode + * + * \subsubsection subsubsec_file_examples_props Example with the File Creation Property List + * The example below shows how to create a file with 64-bit object offsets and lengths. + * + * Creating a file with 64-bit offsets + * \code + * hid_t create_plist; + * hid_t file_id; + * + * create_plist = H5Pcreate(H5P_FILE_CREATE); + * H5Pset_sizes(create_plist, 8, 8); + * file_id = H5Fcreate(“test.h5”, H5F_ACC_TRUNC, create_plist, H5P_DEFAULT); + * . + * . + * . 
+ * + * H5Fclose(file_id); + * \endcode + * + * \subsubsection subsubsec_file_examples_access Example with the File Access Property List + * This example shows how to open an existing file for independent datasets access by MPI parallel + * I/O: + * + * Opening an existing file for parallel I/O + * \code + * hid_t access_plist; + * hid_t file_id; + * + * access_plist = H5Pcreate(H5P_FILE_ACCESS); + * H5Pset_fapl_mpi(access_plist, MPI_COMM_WORLD, MPI_INFO_NULL); + * + * // H5Fopen must be called collectively + * file_id = H5Fopen(“test.h5”, H5F_ACC_RDWR, access_plist); + * . + * . + * . + * + * // H5Fclose must be called collectively + * H5Fclose(file_id); + * \endcode + * + * \subsection subsec_file_multiple Working with Multiple HDF5 Files + * Multiple HDF5 files can be associated so that the files can be worked with as though all the + * information is in a single HDF5 file. A temporary association can be set up by means of the + * #H5Fmount function. A permanent association can be set up by means of the external link + * function #H5Lcreate_external. + * + * The purpose of this section is to describe what happens when the #H5Fmount function is used to + * mount one file on another. + * + * When a file is mounted on another, the mounted file is mounted at a group, and the root group of + * the mounted file takes the place of that group until the mounted file is unmounted or until the + * files are closed. + * + * The figure below shows two files before one is mounted on the other. File1 has two groups and + * three datasets. The group that is the target of the A link has links, Z and Y, to two of the datasets. + * The group that is the target of the B link has a link, W, to the other dataset. File2 has three + * groups and three datasets. The groups in File2 are the targets of the AA, BB, and CC links. The + * datasets in File2 are the targets of the ZZ, YY, and WW links. + * + * + * + * + * + *
+ * \image html Files_fig3.gif "Two separate files" + *
+ *
+ * The figure below shows the two files after File2 has been mounted on File1 at the group that is the
+ * target of the B link.
+ *
+ * \image html Files_fig4.gif "File2 mounted on File1" + *
+ * + * Note: In the figure above, the dataset that is the target of the W link is not shown. That dataset is + * masked by the mounted file. + * + * If a file is mounted on a group that has members, those members are hidden until the mounted + * file is unmounted. There are two ways around this if you need to work with a group member. + * One is to mount the file on an empty group. Another is to open the group member before you + * mount the file. Opening the group member will return an identifier that you can use to locate the + * group member. + * + * The example below shows how #H5Fmount might be used to mount File2 onto File1. + * + * Using H5Fmount + * \code + * status = H5Fmount(loc_id, "/B", child_id, plist_id) + * \endcode + * + * Note: In the code example above, loc_id is the file identifier for File1, /B is the link path to the + * group where File2 is mounted, child_id is the file identifier for File2, and plist_id is a property + * list identifier. + * For more information, @see @ref sec_group. + * + * See the entries for #H5Fmount, #H5Funmount, and #H5Lcreate_external in the HDF5 Reference Manual. + * + * Previous Chapter \ref sec_program - Next Chapter \ref sec_group + * + */ + +/** + * \defgroup H5F Files (H5F) * * Use the functions in this module to manage HDF5 files. * diff --git a/src/H5Gmodule.h b/src/H5Gmodule.h index a0e121d..a112a40 100644 --- a/src/H5Gmodule.h +++ b/src/H5Gmodule.h @@ -29,7 +29,929 @@ #define H5_MY_PKG_ERR H5E_SYM #define H5_MY_PKG_INIT YES -/** \defgroup H5G H5G +/** \page H5G_UG HDF5 Groups + * + * \section sec_group HDF5 Groups + * \subsection subsec_group_intro Introduction + * As suggested by the name Hierarchical Data Format, an HDF5 file is hierarchically structured. + * The HDF5 group and link objects implement this hierarchy. + * + * In the simple and most common case, the file structure is a tree structure; in the general case, the + * file structure may be a directed graph with a designated entry point. The tree structure is very + * similar to the file system structures employed on UNIX systems, directories and files, and on + * Apple and Microsoft Windows systems, folders and files. HDF5 groups are analogous + * to the directories and folders; HDF5 datasets are analogous to the files. + * + * The one very important difference between the HDF5 file structure and the above-mentioned file + * system analogs is that HDF5 groups are linked as a directed graph, allowing circular references; + * the file systems are strictly hierarchical, allowing no circular references. The figures below + * illustrate the range of possibilities. + * + * In the first figure below, the group structure is strictly hierarchical, identical to the file system + * analogs. + * + * In the next two figures below, the structure takes advantage of the directed graph’s allowance of + * circular references. In the second figure, GroupA is not only a member of the root group, /, but a + * member of GroupC. Since Group C is a member of Group B and Group B is a member of Group + * A, Dataset1 can be accessed by means of the circular reference /Group A/Group B/Group + * C/Group A/Dataset1. The third figure below illustrates an extreme case in which GroupB is a + * member of itself, enabling a reference to a member dataset such as /Group A/Group B/Group + * B/Group B/Dataset2. + * + * + * + * + * + *
+ * \image html Groups_fig1.gif "A file with a strictly hierarchical group structure" + *
+ * + * + * + * + * + *
+ * \image html Groups_fig2.gif "A file with a circular reference" + *
+ * + * + * + * + * + *
+ * \image html Groups_fig3.gif "A file with one group as a member of itself" + *
+ * + * As becomes apparent upon reflection, directed graph structures can become quite complex; + * caution is advised! + * + * The balance of this chapter discusses the following topics: + * \li The HDF5 group object (or a group) and its structure in more detail + * \li HDF5 link objects (or links) + * \li The programming model for working with groups and links + * \li HDF5 functions provided for working with groups, group members, and links + * \li Retrieving information about objects in a group + * \li Discovery of the structure of an HDF5 file and the contained objects + * \li Examples of file structures + * + * \subsection subsec_group_descr Description of the Group Object + * \subsubsection subsubsec_group_descr_object The Group Object + * Abstractly, an HDF5 group contains zero or more objects and every object must be a member of + * at least one group. The root group, the sole exception, may not belong to any group. + * + * + * + * + * + *
+ * \image html Groups_fig4.gif "Abstract model of the HDF5 group object" + *
+ * + * Group membership is actually implemented via link objects. See the figure above. A link object + * is owned by a group and points to a named object. Each link has a name, and each link points to + * exactly one object. Each named object has at least one and possibly many links to it. + * + * There are three classes of named objects: group, dataset, and committed datatype (formerly + * called named datatype). See the figure below. Each of these objects is the member of at least one + * group, which means there is at least one link to it. + * + * + * + * + * + *
+ * \image html Groups_fig5.gif "Classes of named objects" + *
+ * + * The primary operations on a group are to add and remove members and to discover member + * objects. These abstract operations, as listed in the figure below, are implemented in the \ref H5G + * APIs. For more information, @see @ref subsec_group_function. + * + * To add and delete members of a group, links from the group to existing objects in the file are + * created and deleted with the link and unlink operations. When a new named object is created, the + * HDF5 Library executes the link operation in the background immediately after creating the + * object (in other words, a new object is added as a member of the group in which it is created + * without further user intervention). + * + * Given the name of an object, the get_object_info method retrieves a description of the object, + * including the number of references to it. The iterate method iterates through the members of the + * group, returning the name and type of each object. + * + * + * + * + * + *
+ * \image html Groups_fig6.gif "The group object" + *
+ * + * Every HDF5 file has a single root group, with the name /. The root group is identical to any other + * HDF5 group, except: + * \li The root group is automatically created when the HDF5 file is created (#H5Fcreate). + * \li The root group has no parent, but by convention has a reference count of 1. + * \li The root group cannot be deleted (in other words, unlinked)! + * + * \subsubsection subsubsec_group_descr_model The Hierarchy of Data Objects + * An HDF5 file is organized as a rooted, directed graph using HDF5 group objects. The named + * data objects are the nodes of the graph, and the links are the directed arcs. Each arc of the graph + * has a name, with the special name / reserved for the root group. New objects are created and then + * inserted into the graph with a link operation that is automatically executed by the library; + * existing objects are inserted into the graph with a link operation explicitly called by the user, + * which creates a named link from a group to the object. + * + * An object can be the target of more than one link. + * + * The names on the links must be unique within each group, but there may be many links with the + * same name in different groups. These are unambiguous, because some ancestor must have a + * different name, or else they are the same object. The graph is navigated with path names, + * analogous to Unix file systems. For more information, @see @ref subsubsec_group_descr_path. + * + * An object can be opened with a full path starting at the root group, or with a relative path and a + * starting point. That starting point is always a group, though it may be the current working group, + * another specified group, or the root group of the file. Note that all paths are relative to a single + * HDF5 file. In this sense, an HDF5 file is analogous to a single UNIX file system. + * + * It is important to note that, just like the UNIX file system, HDF5 objects do not have names, the + * names are associated with paths. An object has an object identifier that is unique within the file, + * but a single object may have many names because there may be many paths to the same object. + * An object can be renamed, or moved to another group, by adding and deleting links. In this case, + * the object itself never moves. For that matter, membership in a group has no implication for the + * physical location of the stored object. + * + * Deleting a link to an object does not necessarily delete the object. The object remains available + * as long as there is at least one link to it. After all links to an object are deleted, it can no longer + * be opened, and the storage may be reclaimed. + * + * It is also important to realize that the linking mechanism can be used to construct very complex + * graphs of objects. For example, it is possible for an object to be shared between several groups + * and even to have more than one name in the same group. It is also possible for a group to be a + * member of itself, or to create other cycles in the graph, such as in the case where a child group is + * linked to one of its ancestors. + * + * HDF5 also has soft links similar to UNIX soft links. A soft link is an object that has a name and + * a path name for the target object. The soft link can be followed to open the target of the link just + * like a regular or hard link. The differences are that the hard link cannot be created if the target + * object does not exist and it always points to the same object. 
A soft link can be created with any + * path name, whether or not the object exists; it may or may not, therefore, be possible to follow a + * soft link. Furthermore, a soft link’s target object may be changed. + * + * \subsubsection subsubsec_group_descr_path HDF5 Path Names + * The structure of the HDF5 file constitutes the name space for the objects in the file. A path name + * is a string of components separated by slashes (/). Each component is the name of a hard or soft + * link which points to an object in the file. The slash not only separates the components, but + * indicates their hierarchical relationship; the component indicated by the link name following a + * slash is a always a member of the component indicated by the link name preceding that slash. + * + * The first component in the path name may be any of the following: + * \li The special character dot (., a period), indicating the current group + * \li The special character slash (/), indicating the root group + * \li Any member of the current group + * + * Component link names may be any string of ASCII characters not containing a slash or a dot + * (/ and ., which are reserved as noted above). However, users are advised to avoid the use of + * punctuation and non-printing characters, as they may create problems for other software. The + * figure below provides a BNF grammar for HDF5 path names. + * + * A BNF grammar for HDF5 path names + * \code + * PathName ::= AbsolutePathName | RelativePathName + * Separator ::= "/" ["/"]* + * AbsolutePathName ::= Separator [ RelativePathName ] + * RelativePathName ::= Component [ Separator RelativePathName ]* + * Component ::= "." | Characters + * Characters ::= Character+ - { "." } + * Character ::= {c: c Î { { legal ASCII characters } - {'/'} } + * \endcode + * + * An object can always be addressed by either a full or an absolute path name, starting at the root + * group, or by a relative path name, starting in a known location such as the current working + * group. As noted elsewhere, a given object may have multiple full and relative path names. + * + * Consider, for example, the file illustrated in the figure below. Dataset1 can be identified by either + * of these absolute path names: + * /GroupA/Dataset1 + * + * /GroupA/GroupB/GroupC/Dataset1 + * + * Since an HDF5 file is a directed graph structure, and is therefore not limited to a strict tree + * structure, and since this illustrated file includes the sort of circular reference that a directed graph + * enables, Dataset1 can also be identified by this absolute path name: + * /GroupA/GroupB/GroupC/GroupA/Dataset1 + * + * Alternatively, if the current working location is GroupB, Dataset1 can be identified by either of + * these relative path names: + * GroupC/Dataset1 + * + * GroupC/GroupA/Dataset1 + * + * Note that relative path names in HDF5 do not employ the ../ notation, the UNIX notation + * indicating a parent directory, to indicate a parent group. + * + * + * + * + * + *
+ * \image html Groups_fig2.gif "A file with a circular reference" + *
+ * + * \subsubsection subsubsec_group_descr_impl Group Implementations in HDF5 + * The original HDF5 group implementation provided a single indexed structure for link storage. A + * new group implementation, as of HDF5 Release 1.8.0, enables more efficient compact storage + * for very small groups, improved link indexing for large groups, and other advanced features. + *
+ * \li The original indexed format remains the default. Links are stored in a B-tree in the
+ *     group’s local heap.
+ * \li Groups created in the new compact-or-indexed format, the implementation introduced
+ *     with Release 1.8.0, can be tuned for performance, switching between the compact and
+ *     indexed formats at thresholds set in the user application.
+ *     - The compact format will conserve file space and processing overhead when
+ *       working with small groups and is particularly valuable when a group contains
+ *       no links. Links are stored as a list of messages in the group’s header.
+ *     - The indexed format will yield improved performance when working with large
+ *       groups. A large group may contain thousands to millions of members. Links
+ *       are stored in a fractal heap and indexed with an improved B-tree.
+ * \li The new implementation also enables the use of link names consisting of non-ASCII
+ *     character sets (see #H5Pset_char_encoding) and is required for all link types other than
+ *     hard or soft links; the link types other than hard or soft links are external links and
+ *     user-defined links (see the \ref H5L APIs).
+ * + * The original group structure and the newer structures are not directly interoperable. By default, a + * group will be created in the original indexed format. An existing group can be changed to a + * compact-or-indexed format if the need arises; there is no capability to change back. As stated + * above, once in the compact-or-indexed format, a group can switch between compact and indexed + * as needed. + * + * Groups will be initially created in the compact-or-indexed format only when one or more of the + * following conditions is met: + *
+ * \li The low version bound value of the library version bounds property has been set to
+ *     Release 1.8.0 or later in the file access property list (see #H5Pset_libver_bounds).
+ *     Currently, that would require an #H5Pset_libver_bounds call with the low parameter
+ *     set to #H5F_LIBVER_LATEST.
+ *     When this property is set for an HDF5 file, all objects in the file will be created using
+ *     the latest available format; no effort will be made to create a file that can be read by
+ *     older libraries.
+ * \li The creation order tracking property, #H5P_CRT_ORDER_TRACKED, has been set
+ *     in the group creation property list (see #H5Pset_link_creation_order).
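+ *
+ * A sketch of how these two conditions might be set up is shown below; whether one or both
+ * settings are needed depends on the application, and indexing of creation order is optional.
+ * \code
+ * // Condition 1: restrict the file to the latest format via the file access property list.
+ * hid_t fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+ * H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
+ *
+ * // Condition 2: track (and index) link creation order via the group creation property list.
+ * hid_t gcpl_id = H5Pcreate(H5P_GROUP_CREATE);
+ * H5Pset_link_creation_order(gcpl_id, H5P_CRT_ORDER_TRACKED | H5P_CRT_ORDER_INDEXED);
+ * \endcode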
+ * + * An existing group, currently in the original indexed format, will be converted to the compact-or- + * indexed format upon the occurrence of any of the following events: + *
+ * \li An external or user-defined link is inserted into the group.
+ * \li A link named with a string composed of non-ASCII characters is inserted into the group.
+ * + * The compact-or-indexed format offers performance improvements that will be most notable at + * the extremes (for example, in groups with zero members and in groups with tens of thousands of + * members). But measurable differences may sometimes appear at a threshold as low as eight + * group members. Since these performance thresholds and criteria differ from application to + * application, tunable settings are provided to govern the switch between the compact and indexed + * formats (see #H5Pset_link_phase_change). Optimal thresholds will depend on the application and + * the operating environment. + * + * Future versions of HDF5 will retain the ability to create, read, write, and manipulate all groups + * stored in either the original indexed format or the compact-or-indexed format. + * + * \subsection subsec_group_h5dump Using h5dump + * You can use h5dump, the command-line utility distributed with HDF5, to examine a file for + * purposes either of determining where to create an object within an HDF5 file or to verify that + * you have created an object in the intended place. + * + * In the case of the new group created later in this chapter, the following h5dump command will + * display the contents of FileA.h5: + * \code + * h5dump FileA.h5 + * \endcode + * + * For more information, @see @ref subsubsec_group_program_create. + * + * Assuming that the discussed objects, GroupA and GroupB are the only objects that exist in + * FileA.h5, the output will look something like the following: + * \code + * HDF5 "FileA.h5" { + * GROUP "/" { + * GROUP GroupA { + * GROUP GroupB { + * } + * } + * } + * } + * \endcode + * + * h5dump is described on the “HDF5 Tools” page of the \ref RM. + * + * The HDF5 DDL grammar is described in the @ref DDLBNF110. + * + * \subsection subsec_group_function Group Function Summaries + * Functions that can be used with groups (\ref H5G functions) and property list functions that can used + * with groups (\ref H5P functions) are listed below. A number of group functions have been + * deprecated. Most of these have become link (\ref H5L) or object (\ref H5O) functions. These replacement + * functions are also listed below. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Group functions
FunctionPurpose
#H5GcreateCreates a new empty group and gives it a name. The + * C function is a macro: \see \ref api-compat-macros.
#H5Gcreate_anonCreates a new empty group without linking it into the file structure.
#H5GopenOpens an existing group for modification and returns a group identifier for that group. + * The C function is a macro: \see \ref api-compat-macros.
#H5GcloseCloses the specified group.
#H5Gget_create_plistGets a group creation property list identifier.
#H5Gget_infoRetrieves information about a group. Use instead of H5Gget_num_objs.
#H5Gget_info_by_idxRetrieves information about a group according to the group’s position within an index.
#H5Gget_info_by_nameRetrieves information about a group.
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Link and object functions
FunctionPurpose
#H5Lcreate_hardCreates a hard link to an object. Replaces H5Glink and H5Glink2.
#H5Lcreate_softCreates a soft link to an object. Replaces H5Glink and H5Glink2.
#H5Lcreate_externalCreates a soft link to an object in a different file. Replaces H5Glink and H5Glink2.
#H5Lcreate_udCreates a link of a user-defined type.
#H5Lget_valReturns the value of a symbolic link. Replaces H5Gget_linkval.
#H5LiterateIterates through links in a group. Replaces H5Giterate. + * See also #H5Ovisit and #H5Lvisit.
#H5Literate_by_nameIterates through links in a group.
#H5LvisitRecursively visits all links starting from a specified group.
#H5OvisitRecursively visits all objects accessible from a specified object.
#H5Lget_infoReturns information about a link. Replaces H5Gget_objinfo.
#H5Oget_infoRetrieves the metadata for an object specified by an identifier. Replaces H5Gget_objinfo.
#H5Lget_name_by_idxRetrieves name of the nth link in a group, according to the order within a specified field + * or index. Replaces H5Gget_objname_by_idx.
#H5Oget_info_by_idxRetrieves the metadata for an object, identifying the object by an index position. Replaces + * H5Gget_objtype_by_idx.
#H5Oget_info_by_nameRetrieves the metadata for an object, identifying the object by location and relative name.
#H5Oset_commentSets the comment for specified object. Replaces H5Gset_comment.
#H5Oget_commentGets the comment for specified object. Replaces H5Gget_comment.
#H5LdeleteRemoves a link from a group. Replaces H5Gunlink.
#H5LmoveRenames a link within an HDF5 file. Replaces H5Gmove and H5Gmove2.
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Group creation property list functions
Function | Purpose
#H5Pall_filters_avail | Verifies that all required filters are available.
#H5Pget_filter | Returns information about a filter in a pipeline. The C function is a macro: \see \ref api-compat-macros.
#H5Pget_filter_by_id | Returns information about the specified filter. The C function is a macro: \see \ref api-compat-macros.
#H5Pget_nfilters | Returns the number of filters in the pipeline.
#H5Pmodify_filter | Modifies a filter in the filter pipeline.
#H5Premove_filter | Deletes one or more filters in the filter pipeline.
#H5Pset_deflate | Sets the deflate (GNU gzip) compression method and compression level.
#H5Pset_filter | Adds a filter to the filter pipeline.
#H5Pset_fletcher32 | Sets up use of the Fletcher32 checksum filter.
#H5Pset_link_phase_change | Sets the parameters for conversion between compact and dense groups.
#H5Pget_link_phase_change | Queries the settings for conversion between compact and dense groups.
#H5Pset_est_link_info | Sets estimated number of links and length of link names in a group.
#H5Pget_est_link_info | Queries data required to estimate required local heap or object header size.
#H5Pset_nlinks | Sets maximum number of soft or user-defined link traversals.
#H5Pget_nlinks | Retrieves the maximum number of link traversals.
#H5Pset_link_creation_order | Sets creation order tracking and indexing for links in a group.
#H5Pget_link_creation_order | Queries whether link creation order is tracked and/or indexed in a group.
#H5Pset_create_intermediate_group | Specifies in the property list whether to create missing intermediate groups.
#H5Pget_create_intermediate_group | Determines whether the property is set to enable creating missing intermediate groups.
#H5Pset_char_encoding | Sets the character encoding used to encode a string. Use to set ASCII or UTF-8 character encoding for object names.
#H5Pget_char_encoding | Retrieves the character encoding used to create a string.
Other external link functions
Function | Purpose
#H5Pset_elink_file_cache_size | Sets the size of the external link open file cache in the specified file access property list.
#H5Pget_elink_file_cache_size | Retrieves the size of the external link open file cache from the specified file access property list.
#H5Fclear_elink_file_cache | Clears the external link open file cache for a file.
\subsection subsec_group_program Programming Model for Groups
The programming model for working with groups is as follows:
  1. Create a new group or open an existing one.
  2. Perform the desired operations on the group.
     • Create new objects in the group.
     • Insert existing objects as group members.
     • Delete existing members.
     • Open and close member objects.
     • Access information regarding member objects.
     • Iterate across group members.
     • Manipulate links.
  3. Terminate access to the group (close the group).
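The sketch below ties these steps together for an existing group: it opens the group, iterates across its member links, and closes it. It is illustrative only; the file identifier file is assumed to be open already, the callback name link_cb is hypothetical, and the #H5Literate call follows the pre-1.12 signature used elsewhere in this chapter.

\code
/* Hypothetical callback invoked once per link in the group */
herr_t link_cb(hid_t group, const char *name, const H5L_info_t *info, void *op_data)
{
    /* ... operate on the member identified by 'name' ... */
    return 0; /* zero means continue iterating */
}

hid_t  group;
herr_t status;

group  = H5Gopen(file, "/Data", H5P_DEFAULT);              /* 1. open an existing group */
status = H5Literate(group, H5_INDEX_NAME, H5_ITER_NATIVE,  /* 2. iterate across members */
                    NULL, link_cb, NULL);
status = H5Gclose(group);                                  /* 3. terminate access       */
\endcode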
+ * + * \subsubsection subsubsec_group_program_create Creating a Group + * To create a group, use #H5Gcreate, specifying the location and the path of the new group. The + * location is the identifier of the file or the group in a file with respect to which the new group is to + * be identified. The path is a string that provides either an absolute path or a relative path to the + * new group. For more information, @see @ref subsubsec_group_descr_path. + * + * A path that begins with a slash (/) is + * an absolute path indicating that it locates the new group from the root group of the HDF5 file. A + * path that begins with any other character is a relative path. When the location is a file, a relative + * path is a path from that file’s root group; when the location is a group, a relative path is a path + * from that group. + * + * The sample code in the example below creates three groups. The group Data is created in the + * root directory; two groups are then created in /Data, one with absolute path, the other with a + * relative path. + * + * Creating three new groups + * \code + * hid_t file; + * file = H5Fopen(....); + * + * group = H5Gcreate(file, "/Data", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + * group_new1 = H5Gcreate(file, "/Data/Data_new1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + * group_new2 = H5Gcreate(group, "Data_new2", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + * \endcode + * The third #H5Gcreate parameter optionally specifies how much file space to reserve to store the + * names that will appear in this group. If a non-positive value is supplied, a default size is chosen. + * + * \subsubsection subsubsec_group_program_open Opening a Group and Accessing an Object in that Group + * Though it is not always necessary, it is often useful to explicitly open a group when working + * with objects in that group. Using the file created in the example above, the example below + * illustrates the use of a previously-acquired file identifier and a path relative to that file to open + * the group Data. + * + * Any object in a group can be also accessed by its absolute or relative path. To open an object + * using a relative path, an application must first open the group or file on which that relative path + * is based. To open an object using an absolute path, the application can use any location identifier + * in the same file as the target object; the file identifier is commonly used, but object identifier for + * any object in that file will work. Both of these approaches are illustrated in the example below. + * + * Using the file created in the examples above, the example below provides sample code + * illustrating the use of both relative and absolute paths to access an HDF5 data object. The first + * sequence (two function calls) uses a previously-acquired file identifier to open the group Data, + * and then uses the returned group identifier and a relative path to open the dataset CData. The + * second approach (one function call) uses the same previously-acquired file identifier and an + * absolute path to open the same dataset. + * + * Open a dataset with relative and absolute paths + * \code + * group = H5Gopen(file, "Data", H5P_DEFAULT); + * + * dataset1 = H5Dopen(group, "CData", H5P_DEFAULT); + * dataset2 = H5Dopen(file, "/Data/CData", H5P_DEFAULT); + * \endcode + * + * \subsubsection subsubsec_group_program_dataset Creating a Dataset in a Specific Group + * Any dataset must be created in a particular group. 
As with groups, a dataset may be created in a + * particular group by specifying its absolute path or a relative path. The example below illustrates + * both approaches to creating a dataset in the group /Data. + * + * Create a dataset with absolute and relative paths + * \code + * dataspace = H5Screate_simple(RANK, dims, NULL); + * dataset1 = H5Dcreate(file, "/Data/CData", H5T_NATIVE_INT, dataspace, + * H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + * group = H5Gopen(file, "Data", H5P_DEFAULT); + * dataset2 = H5Dcreate(group, "Cdata2", H5T_NATIVE_INT, dataspace, + * H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + * \endcode + * + * \subsubsection subsubsec_group_program_close Closing a Group + * To ensure the integrity of HDF5 objects and to release system resources, an application should + * always call the appropriate close function when it is through working with an HDF5 object. In + * the case of groups, H5Gclose ends access to the group and releases any resources the HDF5 + * library has maintained in support of that access, including the group identifier. + * + * As illustrated in the example below, all that is required for an H5Gclose call is the group + * identifier acquired when the group was opened; there are no relative versus absolute path + * considerations. + * + * Close a group + * \code + * herr_t status; + * + * status = H5Gclose(group); + * \endcode + * + * A non-negative return value indicates that the group was successfully closed and the resources + * released; a negative return value indicates that the attempt to close the group or release resources + * failed. + * + * \subsubsection subsubsec_group_program_links Creating Links + * As previously mentioned, every object is created in a specific group. Once created, an object can + * be made a member of additional groups by means of links created with one of the H5Lcreate_* + * functions. + * + * A link is, in effect, a path by which the target object can be accessed; it therefore has a name + * which functions as a single path component. A link can be removed with an #H5Ldelete call, + * effectively removing the target object from the group that contained the link (assuming, of + * course, that the removed link was the only link to the target object in the group). + * + *

Hard Links

There are two kinds of links, hard links and symbolic links. Hard links are reference counted; symbolic links are not. When an object is created, a hard link is automatically created. An object can be deleted from the file by removing all the hard links to it.

Working with the file from the previous examples, the code in the example below illustrates the creation of a hard link, named Data_link, in the root group, /, to the group Data. Once that link is created, the dataset CData can be accessed via either of two absolute paths, /Data/CData or /Data_link/CData.

Create a hard link
\code
status = H5Lcreate_hard(Data_loc_id, "Data", DataLink_loc_id, "Data_link", H5P_DEFAULT, H5P_DEFAULT);

dataset1 = H5Dopen(file, "/Data_link/CData", H5P_DEFAULT);
dataset2 = H5Dopen(file, "/Data/CData", H5P_DEFAULT);
\endcode

The example below shows code to delete a link, deleting the hard link Data from the root group. The group /Data and its members are still in the file, but they can no longer be accessed via a path using the component /Data.

Delete a link
\code
status = H5Ldelete(Data_loc_id, "Data", H5P_DEFAULT);

dataset1 = H5Dopen(file, "/Data_link/CData", H5P_DEFAULT);
// This call should succeed; all path components still exist
dataset2 = H5Dopen(file, "/Data/CData", H5P_DEFAULT);
// This call will fail; the path component '/Data' has been deleted.
\endcode

When the last hard link to an object is deleted, the object is no longer accessible. #H5Ldelete will not prevent you from deleting the last link to an object. To see if an object has only one link, use the #H5Oget_info function. If the value of the rc (reference count) field in the returned #H5O_info_t structure is greater than 1, then the link can be deleted without making the object inaccessible.

The example below shows the use of #H5Oget_info with the group originally called Data.

Finding the number of links to an object
\code
status = H5Oget_info(Data_loc_id, object_info);
\endcode

It is possible to delete the last hard link to an object and not make the object inaccessible. Suppose your application opens a dataset, and then deletes the last hard link to the dataset. While the dataset is open, your application still has a connection to the dataset. If your application creates a hard link to the dataset before it closes the dataset, then the dataset will still be accessible.

Symbolic Links

Symbolic links are objects that assign a name in a group to a path. Notably, the target object is determined only when the symbolic link is accessed, and may, in fact, not exist. Symbolic links are not reference counted, so there may be zero, one, or more symbolic links to an object.

The major types of symbolic links are soft links and external links. Soft links are symbolic links within an HDF5 file and are created with the #H5Lcreate_soft function. Symbolic links to objects located in external files, in other words external links, can be created with the #H5Lcreate_external function. Symbolic links are removed with the #H5Ldelete function.

The example below shows the creation of two soft links to the group /Data.

Create a soft link
\code
status = H5Lcreate_soft(path_to_target, link_loc_id, "Soft2", H5P_DEFAULT, H5P_DEFAULT);
status = H5Lcreate_soft(path_to_target, link_loc_id, "Soft3", H5P_DEFAULT, H5P_DEFAULT);
dataset = H5Dopen(file, "/Soft2/CData", H5P_DEFAULT);
\endcode

With the soft links defined in the example above, the dataset CData in the group /Data can now be opened with any of the names /Data/CData, /Soft2/CData, or /Soft3/CData.

In release 1.8.7, a cache was added to hold the names of files accessed via external links. The size of this cache can be changed to help improve performance. For more information, see the entry in the \ref RM for the #H5Pset_elink_file_cache_size function call.
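For completeness, a sketch of creating an external link follows. The file name target_file.h5, the target path /Data/CData, and the link name ExtLink are illustrative values only, and link_loc_id is assumed to refer to this file's root group.

\code
status = H5Lcreate_external("target_file.h5", "/Data/CData", link_loc_id, "ExtLink", H5P_DEFAULT, H5P_DEFAULT);

// The dataset in the other file can now be reached through this file's link
dataset = H5Dopen(file, "/ExtLink", H5P_DEFAULT);
\endcode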

Note Regarding Hard Links and Soft Links

+ * Note that an object’s existence in a file is governed by the presence of at least one hard link to + * that object. If the last hard link to an object is removed, the object is removed from the file and + * any remaining soft link becomes a dangling link, a link whose target object does not exist. + * + *

Moving or Renaming Objects, and a Warning

+ * An object can be renamed by changing the name of a link to it with #H5Lmove. This has the same + * effect as creating a new link with the new name and deleting the link with the old name. + * + * Exercise caution in the use of #H5Lmove and #H5Ldelete as these functions each include a step + * that unlinks a pointer to an HDF5 object. If the link that is removed is on the only path leading to + * an HDF5 object, that object will become permanently inaccessible in the file. + * + *
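As a brief sketch of such a rename, and again using the file from the earlier examples, the link /Data could be renamed in place as follows; the new name Data_renamed is an illustrative value only.

\code
// Rename the link /Data to /Data_renamed; the group itself is untouched
status = H5Lmove(file, "/Data", file, "/Data_renamed", H5P_DEFAULT, H5P_DEFAULT);
\endcode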
Scenario 1: Removing the Last Link
+ * To avoid removing the last link to an object or otherwise making an object inaccessible, use the + * #H5Oget_info function. Make sure that the value of the reference count field (rc) is greater than 1. + * + *
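A minimal sketch of that check appears below. It follows the two-parameter #H5Oget_info usage shown earlier in this chapter; obj_id, loc_id, and the link name "Data_link" are placeholders.

\code
H5O_info_t object_info;

status = H5Oget_info(obj_id, &object_info);
if (status >= 0 && object_info.rc > 1) {
    // At least one other hard link remains, so the object stays reachable
    status = H5Ldelete(loc_id, "Data_link", H5P_DEFAULT);
}
\endcode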
Scenario 2: Moving a Link that Isolates an Object
Consider the following example: assume that the group group2 can only be accessed via the following path, where top_group is a member of the file’s root group:
/top_group/group1/group2/

Using #H5Lmove, top_group is renamed to be a member of group2. At this point, since top_group was the only route from the root group to group1, there is no longer a path by which one can access group1, group2, or any member datasets. And since top_group is now a member of group2, top_group itself and any member datasets have thereby also become inaccessible.

Mounting a File

+ * An external link is a permanent connection between two files. A temporary connection can be set + * up with the #H5Fmount function. For more information, @see sec_file. + * For more information, see the #H5Fmount function in the \ref RM. + * + * \subsubsection subsubsec_group_program_info Discovering Information about Objects + * There is often a need to retrieve information about a particular object. The #H5Lget_info and + * #H5Oget_info functions fill this niche by returning a description of the object or link in an + * #H5L_info_t or #H5O_info_t structure. + * + * \subsubsection subsubsec_group_program_objs Discovering Objects in a Group + * To examine all the objects or links in a group, use the #H5Literate or #H5Ovisit functions to + * examine the objects, and use the #H5Lvisit function to examine the links. #H5Literate is useful + * both with a single group and in an iterative process that examines an entire file or section of a + * file (such as the contents of a group or the contents of all the groups that are members of that + * group) and acts on objects as they are encountered. #H5Ovisit recursively visits all objects + * accessible from a specified object. #H5Lvisit recursively visits all the links starting from a + * specified group. + * + * \subsubsection subsubsec_group_program_all Discovering All of the Objects in the File + * The structure of an HDF5 file is self-describing, meaning that an application can navigate an + * HDF5 file to discover and understand all the objects it contains. This is an iterative process + * wherein the structure is traversed as a graph, starting at one node and recursively visiting linked + * nodes. To explore the entire file, the traversal should start at the root group. + * + * \subsection subsec_group_examples Examples of File Structures + * This section presents several samples of HDF5 file structures. + * + * Figure 9 shows examples of the structure of a file with three groups and one dataset. The file in + * part a contains three groups: the root group and two member groups. In part b, the dataset + * dset1 has been created in /group1. In part c, a link named dset2 from /group2 to the dataset has + * been added. Note that there is only one copy of the dataset; there are two links to it and it can be + * accessed either as /group1/dset1 or as /group2/dset2. + * + * Part d illustrates that one of the two links to the dataset can be deleted. In this case, the link from + * /group1 + * has been removed. The dataset itself has not been deleted; it is still in the file but can only be + * accessed as + * /group2/dset2 + * + * + * + * + * + * + * + * + * + * + * + *
Figure 9 - Some file structures
+ * \image html Groups_fig9_a.gif "a) The file contains three groups: the root group, /group1, and /group2." + * + * \image html Groups_fig9_b.gif "b) The dataset dset1 (or /group1/dset1) is created in /group1." + *
+ * \image html Groups_fig9_aa.gif "c) A link named dset2 to the same dataset is created in /group2." + * + * \image html Groups_fig9_bb.gif "d) The link from /group1 to dset1 is removed. The dataset is + * still in the file, but can be accessed only as /group2/dset2." + *
+ * + * Figure 10 illustrates loops in an HDF5 file structure. The file in part a contains three groups + * and a dataset; group2 is a member of the root group and of the root group’s other member group, + * group1. group2 thus can be accessed by either of two paths: /group2 or /group1/GXX. Similarly, + * the dataset can be accessed either as /group2/dset1 or as /group1/GXX/dset1. + * + * Part b illustrates a different case: the dataset is a member of a single group but with two links, or + * names, in that group. In this case, the dataset again has two names, /group1/dset1 and + * /group1/dset2. + * + * In part c, the dataset dset1 is a member of two groups, one of which can be accessed by either of + * two names. The dataset thus has three path names: /group1/dset1, /group2/dset2, and + * /group1/GXX/dset2. + * + * And in part d, two of the groups are members of each other and the dataset is a member of both + * groups. In this case, there are an infinite number of paths to the dataset because GXX and + * GYY can be traversed any number of times on the way from the root group, /, to the dataset. This + * can yield a path name such as /group1/GXX/GYY/GXX/GYY/GXX/dset2. + * + * + * + * + * + * + * + * + * + * + * + *
Figure 10 - More sample file structures
+ * \image html Groups_fig10_a.gif "a) dset1 has two names: /group2/dset1 and /group1/GXX/dset1." + * + * \image html Groups_fig10_b.gif "b) dset1 again has two names: /group1/dset1 and /group1/dset2." + *
+ * \image html Groups_fig10_c.gif "c) dset1 has three names: /group1/dset1, /group2/dset2, and + * /group1/GXX/dset2." + * + * \image html Groups_fig10_d.gif "d) dset1 has an infinite number of available path names." + *
+ * + * Figure 11 takes us into the realm of soft links. The original file, in part a, contains only three + * hard links. In part b, a soft link named dset2 from group2 to /group1/dset1 has been created, + * making this dataset accessible as /group2/dset2. + * + * In part c, another soft link has been created in group2. But this time the soft link, dset3, points + * to a target object that does not yet exist. That target object, dset, has been added in part d and is + * now accessible as either /group2/dset or /group2/dset3. + * + * It could be said that HDF5 extends the organizing concepts of a file system to the internal + * structure of a single file. + * + * + * + * + * + * + * + * + * + * + * + *
Figure 11 - Hard and soft links
+ * \image html Groups_fig11_a.gif "a) The file contains only hard links." + * + * \image html Groups_fig11_b.gif "b) A soft link is added from group2 to /group1/dset1." + *
+ * \image html Groups_fig11_c.gif "c) A soft link named dset3 is added with a target that does not yet exist." + * + * \image html Groups_fig11_d.gif "d) The target of the soft link is created or linked." + *
+ * + * Previous Chapter \ref sec_file - Next Chapter \ref sec_dataset + * + */ + +/** + * \defgroup H5G Groups (H5G) * * Use the functions in this module to manage HDF5 groups. * diff --git a/src/H5Gpublic.h b/src/H5Gpublic.h index 1d8f8fb..74f0da7 100644 --- a/src/H5Gpublic.h +++ b/src/H5Gpublic.h @@ -167,7 +167,7 @@ H5_DLL hid_t H5Gcreate2(hid_t loc_id, const char *name, hid_t lcpl_id, hid_t gcp * H5Gclose() when the group is no longer needed so that resource * leaks will not develop. * - * \see H5Olink(), H5Dcreate(), Using Identifiers + * \see H5Olink(), H5Dcreate(), \ref api-compat-macros * * \since 1.8.0 * @@ -660,7 +660,7 @@ H5_DLL herr_t H5Glink2(hid_t cur_loc_id, const char *cur_name, H5G_link_t type, * * \attention Exercise care in moving groups as it is possible to render data in * a file inaccessible with H5Gmove(). See The Group Interface in the - * HDF5 User's Guide. + * \ref UG. * * \version 1.8.0 Function deprecated in this release. * @@ -691,7 +691,7 @@ H5_DLL herr_t H5Gmove(hid_t src_loc_id, const char *src_name, const char *dst_na * * \attention Exercise care in moving groups as it is possible to render data in * a file inaccessible with H5Gmove2(). See The Group Interface in the - * HDF5 User's Guide. + * \ref UG. * * \version 1.8.0 Function deprecated in this release. * @@ -728,11 +728,11 @@ H5_DLL herr_t H5Gmove2(hid_t src_loc_id, const char *src_name, hid_t dst_loc_id, * Note that space identified as freespace is available for re-use only * as long as the file remains open; once a file has been closed, the * HDF5 library loses track of freespace. See “Freespace Management” in - * the HDF5 User's Guide for further details. + * the \ref UG for further details. * * \attention Exercise care in moving groups as it is possible to render data in * a file inaccessible with H5Gunlink(). See The Group Interface in the - * HDF5 User's Guide. + * \ref UG. * * \version 1.8.0 Function deprecated in this release. * diff --git a/src/H5Imodule.h b/src/H5Imodule.h index d77591d..1ad9f1b 100644 --- a/src/H5Imodule.h +++ b/src/H5Imodule.h @@ -29,7 +29,8 @@ #define H5_MY_PKG_ERR H5E_ATOM #define H5_MY_PKG_INIT NO -/**\defgroup H5I H5I +/** + * \defgroup H5I Identifiers (H5I) * * Use the functions in this module to manage identifiers defined by the HDF5 * library. See \ref H5IUD for user-defined identifiers and identifier diff --git a/src/H5Lmodule.h b/src/H5Lmodule.h index cffd25c..f61b891 100644 --- a/src/H5Lmodule.h +++ b/src/H5Lmodule.h @@ -29,7 +29,8 @@ #define H5_MY_PKG_ERR H5E_LINK #define H5_MY_PKG_INIT YES -/**\defgroup H5L H5L +/** + * \defgroup H5L Links (H5L) * * Use the functions in this module to manage HDF5 links and link types. * diff --git a/src/H5Mmodule.h b/src/H5Mmodule.h index 848f63f..6c49a45 100644 --- a/src/H5Mmodule.h +++ b/src/H5Mmodule.h @@ -26,10 +26,24 @@ #define H5_MY_PKG_ERR H5E_MAP #define H5_MY_PKG_INIT YES -/**\defgroup H5M H5M +/** + * \page H5M_UG The HDF5 VOL Data Mapping + * \Bold{The HDF5 Data Mapping can only be used with the HDF5 VOL connectors that + * implement map objects.} The native HDF5 library does not support this feature. + * + * \section sec_map The HDF5 Map Object * * \todo Describe the map life cycle. * + * \todo How does MAPL fit into \ref subsubsec_plist_class. 
+ * + * Previous Chapter \ref sec_vol - Next Chapter \ref sec_addition + * + */ + +/** + * \defgroup H5M VOL Mapping (H5M) + * * \details \Bold{The interface can only be used with the HDF5 VOL connectors that * implement map objects.} The native HDF5 library does not support this * feature. diff --git a/src/H5Omodule.h b/src/H5Omodule.h index 8afba29..977861b 100644 --- a/src/H5Omodule.h +++ b/src/H5Omodule.h @@ -29,7 +29,8 @@ #define H5_MY_PKG_ERR H5E_OHDR #define H5_MY_PKG_INIT YES -/**\defgroup H5O H5O +/** + * \defgroup H5O Objects (H5O) * * Use the functions in this module to manage HDF5 objects. * diff --git a/src/H5Opublic.h b/src/H5Opublic.h index ec2d97a..cc131e1 100644 --- a/src/H5Opublic.h +++ b/src/H5Opublic.h @@ -755,7 +755,7 @@ H5_DLL herr_t H5Olink(hid_t obj_id, hid_t new_loc_id, const char *new_name, hid_ * * An object’s reference count is the number of hard links in the * file that point to that object. See the “Programming Model” - * section of the HDF5 Groups chapter in the -- HDF5 User’s Guide + * section of the HDF5 Groups chapter in the -- \ref UG * for a more complete discussion of reference counts. * * If a user application needs to determine an object’s reference @@ -790,7 +790,7 @@ H5_DLL herr_t H5Oincr_refcount(hid_t object_id); * * An object’s reference count is the number of hard links in the * file that point to that object. See the “Programming Model” - * section of the HDF5 Groups chapter in the HDF5 User’s Guide + * section of the HDF5 Groups chapter in the \ref UG * for a more complete discussion of reference counts. * * If a user application needs to determine an object’s reference diff --git a/src/H5PLmodule.h b/src/H5PLmodule.h index a093096..84d3583 100644 --- a/src/H5PLmodule.h +++ b/src/H5PLmodule.h @@ -2,7 +2,7 @@ * Copyright by The HDF Group. * * All rights reserved. * * * - * This file is part of HDF5. The full HDF5 copyright notice, including * + * This file is part of HDF5. The full HDF5 copyright notice, including * * terms governing use, modification, and redistribution, is contained in * * the COPYING file, which can be found at the root of the source code * * distribution tree, or in https://www.hdfgroup.org/licenses. * @@ -27,7 +27,8 @@ #define H5_MY_PKG_ERR H5E_PLUGIN #define H5_MY_PKG_INIT YES -/**\defgroup H5PL H5PL +/** + * \defgroup H5PL Dynamically-loaded Plugins (H5PL) * * Use the functions in this module to manage the loading behavior of HDF5 * plugins. diff --git a/src/H5Pmodule.h b/src/H5Pmodule.h index 66a9574..c7ab3bd 100644 --- a/src/H5Pmodule.h +++ b/src/H5Pmodule.h @@ -29,7 +29,860 @@ #define H5_MY_PKG_ERR H5E_PLIST #define H5_MY_PKG_INIT YES -/**\defgroup H5P H5P +/** \page H5P_UG Properties and Property Lists in HDF5 + * + * \section sec_plist Properties and Property Lists in HDF5 + * + * HDF5 property lists are the main vehicle to configure the + * behavior of HDF5 API functions. + * + * Typically, property lists are created by instantiating one of the built-in + * or user-defined property list classes. After adding suitable properties, + * property lists are used when opening or creating HDF5 items, or when reading + * or writing data. Property lists can be modified by adding or changing + * properties. Property lists are deleted by closing the associated handles. + * + * \subsection subsec_plist_intro Introduction + * + * HDF5 properties and property lists make it possible to shape or modify an HDF5 file, group, + * dataset, attribute, committed datatype, or even an I/O stream, in a number of ways. 
For example, + * you can do any of the following: + * \li Customize the storage layout of a file to suit a project or task. + * \li Create a chunked dataset. + * \li Apply compression or filters to raw data. + * \li Use either ASCII or UTF-8 character encodings. + * \li Create missing groups on the fly. + * \li Switch between serial and parallel I/O. + * \li Create consistency within a single file or across an international project. + * + * Some properties enable an HDF5 application to take advantage of the capabilities of a specific + * computing environment while others make a file more compact; some speed the reading or + * writing of data while others enable more record-keeping at a per-object level. HDF5 offers + * nearly one hundred specific properties that can be used in literally thousands of combinations to + * maximize the usability of HDF5-stored data. + * + * At the most basic level, a property list is a collection of properties, represented by name/value + * pairs that can be passed to various HDF5 functions, usually modifying default settings. A + * property list inherits a set of properties and values from a property list class. But that statement + * hardly provides a complete picture; in the rest of this section and in the next section, + * \ref subsec_plist_class , we will discuss these things in much more detail. + * After reading that material, the reader should have a reasonably complete understanding of how + * properties and property lists can be used in HDF5 applications. + * + * + * + * + * + *
+ * \image html PropListEcosystem.gif "The HDF5 property environment" + *
+ * + * The remaining sections in this chapter discuss the following topics: + * \li What are properties, property lists, and property list classes? + * \li Property list programming model + * \li Generic property functions + * \li Summary listings of property list functions + * \li Additional resources + * + * The discussions and function listings in this chapter focus on general property operations, object + * and link properties, and related functions. + * + * File, group, dataset, datatype, and attribute properties are discussed in the chapters devoted to + * those features, where that information will be most convenient to users. For example, \ref sec_dataset + * discusses dataset creation property lists and functions, dataset access property lists and + * functions, and dataset transfer property lists and functions. This chapter does not duplicate those + * discussions. + * + * Generic property operations are an advanced feature and are beyond the scope of this guide. + * + * This chapter assumes an understanding of the following chapters of this \ref UG + * \li \ref sec_data_model + * \li \ref sec_program + * + * \subsection subsec_plist_class Property List Classes, Property Lists, and Properties + * + * HDF5 property lists and the property list interface \ref H5P provide a mechanism for storing + * characteristics of objects in an HDF5 file and economically passing them around in an HDF5 + * application. In this capacity, property lists significantly reduce the burden of additional function + * parameters throughout the HDF5 API. Another advantage of property lists is that features can + * often be added to HDF5 by adding only property list functions to the API; this is particularly true + * when all other requirements of the feature can be accomplished internally to the library. + * + * For instance, a file creation operation needs to know several things about a file, such as the size + * of the userblock or the sizes of various file data structures. Bundling this information as a + * property list simplifies the interface by reducing the number of parameters to the function + * \ref H5Fcreate. + * + * As illustrated in the figure above ("The HDF5 property environment"), the HDF5 property + * environment is a three-level hierarchy: + * \li Property list classes + * \li Property lists + * \li Properties + * + * The following subsections discuss property list classes, property lists, and properties in more detail. + * + * \subsubsection subsubsec_plist_class Property List Classes + * + * A property list class defines the roles that property lists of that class can play. Each class includes + * all properties that are valid for that class with each property set to its default value. HDF5 offers + * a property lists class for each of the following situations. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Property list classes in HDF5
Property List Class | For further discussion
+ * File creation (FCPL) + * + * \ref H5P_FILE_CREATE + * + * See various sections of \ref sec_file + *
+ * File access (FAPL) + * + * \ref H5P_FILE_ACCESS + * + * Used only as \ref H5P_DEFAULT. + *
+ * File mount (FMPL) + * + * \ref H5P_FILE_MOUNT + * + * For more information, see \ref FileMountProps "File Mount Properties" + *
+ * Object creation (OCPL) + * + * \ref H5P_OBJECT_CREATE + * + * See \ref OCPL + *
+ * Object copy (OCPYPL) + * + * \ref H5P_OBJECT_COPY + * + * + *
+ * Group creation (GCPL) + * + * \ref H5P_GROUP_CREATE + * + * See \ref subsec_group_program + *
+ * Group access (GAPL) + * + * \ref H5P_GROUP_ACCESS + * + * + *
+ * Link creation (LCPL) + * + * \ref H5P_LINK_CREATE + * + * See examples in \ref subsec_plist_program and \ref LCPL + *
+ * Link access (LAPL) + * + * \ref H5P_LINK_ACCESS + * + * + *
+ * Dataset creation (DCPL) + * + * \ref H5P_DATASET_CREATE + * + * See \ref subsec_dataset_program + *
+ * Dataset access (DAPL) + * + * \ref H5P_DATASET_ACCESS + * + * + *
+ * Dataset transfer (DXPL) + * + * \ref H5P_DATASET_XFER + * + * + *
+ * Datatype creation (TCPL) + * + * \ref H5P_DATATYPE_CREATE + * + * See various sections of \ref sec_datatype + *
+ * String creation (STRCPL) + * + * \ref H5P_STRING_CREATE + * + * See \ref subsec_dataset_program and \ref subsec_datatype_program + *
+ * Attribute creation (ACPL) + * + * \ref H5P_ATTRIBUTE_CREATE + * + * See \ref subsec_attribute_work. + *
+ * + * Note: In the table above, the abbreviations to the right of each property list class name in this + * table are widely used in both HDF5 programmer documentation and HDF5 source code. For + * example, \ref FCPL (FCPL) is the file creation property list, \ref OCPL (OCPL) is the object creation + * property list, \ref OCPYPL (OCPYPL) is object copy property list, and \ref STRCPL (STRCPL) is the string + * creation property list. These abbreviations may appear in either uppercase or lowercase. + * + * The “HDF5 property list class inheritance hierarchy” figure, immediately following, illustrates + * the inheritance hierarchy of HDF5’s property list classes. Properties are defined at the root of the + * HDF5 property environment (\ref PLCR in the figure below). Property list + * classes then inherit properties from that root, either directly or indirectly through a parent class. + * In every case, a property list class inherits only the properties relevant to its role. For example, + * the \ref OCPL (OCPL) inherits all properties that are relevant to the + * creation of any object while the \ref GCPL (GCPL) inherits only those + * properties that are relevant to group creation. + * + * + * + * + * + *
+ * \image html PropListClassInheritance.gif "HDF5 property list class inheritance hierarchy" + *
+ * Note: In the figure above, property list classes displayed in black are directly accessible through + * the programming interface; the root of the property environment and the \ref STRCPL and \ref OCPL + * property list classes, in gray above, are not user-accessible. The red empty set symbol indicates + * that the \ref FMPL (FMPL) is an empty class; that is, it has no set table + * properties. For more information, see \ref FileMountProps "File Mount Properties". Abbreviations + * used in this figure are defined in the preceding table, \ref table_plist "Property list classes in HDF5". + * + * \subsubsection subsubsec_plist_lists Property Lists + * + * A property list is a collection of related properties that are used together in specific + * circumstances. A new property list created from a property list class inherits the properties of the + * property list class and each property’s default value. A fresh dataset creation property list, for + * example, includes all of the HDF5 properties relevant to the creation of a new dataset. + * + * Property lists are implemented as containers holding a collection of name/value pairs. Each pair + * specifies a property name and a value for the property. A property list usually contains + * information for one to many properties. + * + * HDF5’s default property values are designed to be reasonable for general use cases. Therefore, + * an application can often use a property list without modification. On the other hand, adjusting + * property list settings is a routine action and there are many reasons for an application to do so. + * + * A new property list may either be derived from a property list class or copied from an existing + * property list. When a property list is created from a property list class, it contains all the + * properties that are relevant to the class, with each property set to its default value. A new + * property list created by copying an existing property list will contain the same properties and + * property values as the original property list. In either case, the property values can be changed as + * needed through the HDF5 API. + * + * Property lists can be freely reused to create consistency. For example, a single set of file, group, + * and dataset creation property lists might be created at the beginning of a project and used to + * create hundreds, thousands, even millions, of consistent files, file structures, and datasets over + * the project’s life. When such consistency is important to a project, this is an economical means + * of providing it. + * + * \subsubsection subsubsec_plist_props Properties + * + * A property is the basic element of the property list hierarchy. HDF5 offers nearly one hundred + * properties controlling things ranging from file access rights, to the storage layout of a dataset, + * through optimizing the use of a parallel computing environment. + * + * Further examples include the following: + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Purpose | Examples | Property List
+ * Specify the driver to be used to open a file + * + * A POSIX driver or an MPI IO driver + * + * \ref FAPL + *
+ * Specify filters to be applied to a dataset + * + * Gzip compression or checksum evaluation + * + * \ref DCPL + *
+ * Specify whether to record key times associated with an object + * + * Creation time and/or last-modified time + * + * \ref OCPL + *
+ * Specify the access mode for a file opened via an external link + * + * Read-only or read-write + * + * \ref LAPL + *
Each property is initialized with a default value. For each property, there are one or more dedicated H5Pset_* calls that can be used to change that value.

Creation, access, and transfer properties:

+ * + * Properties fall into one of several major categories: creation properties, access properties, and + * transfer properties. + * + * Creation properties control permanent object characteristics. These characteristics must be + * established when an object is created, cannot change through the life of the object (they are + * immutable), and the property setting usually has a permanent presence in the file. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Examples of creation properties include:
+ *

+ * Whether a dataset is stored in a compact, contiguous, or chunked layout
+ *
+ * The default for this dataset creation property (\ref H5Pset_layout) is that a dataset is + * stored in a contiguous block. This works well for datasets with a known size limit that + * will fit easily in system memory.
+ *
+ * A chunked layout is important if a dataset is to be compressed, to enable extending + * the dataset’s size, or to enable caching during I/O.
+ *
+ * A compact layout is suitable only for very small datasets because the raw data is + * stored in the object header. + *

+ *
+ *

+ * Creation of intermediate groups when adding an object to an HDF5 file
+ *
+ * This link creation property, \ref H5Pset_create_intermediate_group, enables an + * application to add an object in a file without having to know that the group or group + * hierarchy containing that object already exists. With this property set, HDF5 + * automatically creates missing groups. If this property is not set, an application must + * verify that each group in the path exists, and create those that do not, before creating + * the new object; if any group is missing, the create operation will fail. + *

+ *
+ *

+ * Whether an HDF5 file is a single file or a set of tightly related files that form a virtual + * HDF5 file
+ *
+ * Certain file creation properties enable the application to select one of several file + * layouts. Examples of the available layouts include a standard POSIX-compliant + * layout (\ref H5Pset_fapl_sec2), a family of files (\ref H5Pset_fapl_family), and a split file + * layout that separates raw data and metadata into separate files (\ref H5Pset_fapl_split). + * These and other file layout options are discussed in \ref subsec_file_alternate_drivers. + *

+ *
+ *

+ * To enable error detection when creating a dataset
+ *
+ * In settings where data integrity is vulnerable, it may be desirable to set + * checksumming when datasets are created (\ref H5Pset_fletcher32). A subsequent + * application will then have a means to verify data integrity when reading the dataset. + *

+ *
+ * + * Access properties control transient object characteristics. These characteristics may change with + * the circumstances under which an object is accessed. + * + * + * + * + * + * + * + * + * + *
Examples of access properties include:
+ *

+ * The driver used to open a file
+ *
+ * For example, a file might be created with the MPI I/O driver (\ref H5Pset_fapl_mpio) + * during high-speed data acquisition in a parallel computing environment. The same + * file might later be analyzed in a serial computing environment with I/O access + * handled through the serial POSIX driver (\ref H5Pset_fapl_sec2). + *

+ *
+ *

+ * Optimization settings in specialized environments
+ *
+ * Optimizations differ across computing environments and according to the needs of + * the task being performed, so are transient by nature. + *

+ *
+ * + * Transfer properties apply only to datasets and control transient aspects of data I/O. These + * characteristics may change with the circumstances under which data is accessed. + * + * + * + * + * + * + * + * + * + *
Examples of dataset transfer properties include:
+ *

+ * To enable error detection when reading a dataset
+ *
+ * If checksumming has been set on a dataset (with \ref H5Pset_fletcher32, in the dataset + * creation property list), an application reading that dataset can choose whether to check + * for data integrity (\ref H5Pset_edc_check). + *

+ *
+ *

+ * Various properties to optimize chunked data I/O on parallel computing systems
+ *
+ * HDF5 provides several properties for tuning I/O of chunked datasets in a parallel + * computing environment (\ref H5Pset_dxpl_mpio_chunk_opt, \ref H5Pset_dxpl_mpio_chunk_opt_num, + * \ref H5Pset_dxpl_mpio_chunk_opt_ratio, and \ref H5Pget_mpio_actual_chunk_opt_mode).
+ *
+ * Optimal settings differ due to the characteristics of a computing environment and due + * to an application’s data access patterns; even when working with the same file, these + * settings might change for every application and every platform. + *

+ *
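As a brief illustration of a transfer property, the sketch below builds a dataset transfer property list that disables checksum verification for a single read; the dataset identifier dset_id and the buffer buf are assumed to exist already.

\code
hid_t  dxpl_id;
herr_t status;

dxpl_id = H5Pcreate(H5P_DATASET_XFER);                 /* dataset transfer property list */
status  = H5Pset_edc_check(dxpl_id, H5Z_DISABLE_EDC);  /* skip checksum verification     */
status  = H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl_id, buf);
status  = H5Pclose(dxpl_id);
\endcode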
+ * + * \subsection subsec_plist_program Programming Model for Properties and Property Lists + * + * The programming model for HDF5 property lists is actually quite simple: + * \li Create a property list. + * \li Modify the property list, if required. + * \li Use the property list. + * \li Close the property list. + * + * There are nuances, of course, but that is the basic process. + * + * In some cases, you will not have to define property lists at all. If the default property settings are + * sufficient for your application, you can tell HDF5 to use the default property list. + * + * The following sections first discuss the use of default property lists, then each step of the + * programming model, and finally a few less frequently used property list operations. + * + * \subsubsection subsubsec_plist_default Using Default Property Lists + * + * Default property lists can simplify many routine HDF5 tasks because you do not always have to + * create every property list you use. + * + * An application that would be well-served by HDF5’s default property settings can use the default + * property lists simply by substituting the value \ref H5P_DEFAULT for a property list identifier. + * HDF5 will then apply the default property list for the appropriate property list class. + * + * For example, the function \ref H5Dcreate2 calls for a link creation property list, a dataset creation + * property list, and a dataset access property list. If the default properties are suitable for a dataset, + * this call can be made as + * \code + * dset_id = H5Dcreate2( loc_id, name, dtype_id, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT ); + * \endcode + * HDF5 will then apply the default link creation, dataset creation, and dataset access property lists + * correctly. + * + * Of course, you would not want to do this without considering where it is appropriate, as there + * may be unforeseen consequences. Consider, for example, the use of chunked datasets. Optimal + * chunking is quite dependent on the makeup of the dataset and the most common access patterns, + * both of which must be taken into account in setting up the size and shape of chunks. + * + * \subsubsection subsubsec_plist_basic Basic Steps of the Programming Model + * + * The steps of the property list programming model are described in the sub-sections below. + * + *

Create a Property List

+ * + * A new property list can be created either as an instance of a property list class or by copying an + * existing property list. Consider the following examples. A new dataset creation property list is + * first created "from scratch" with \ref H5Pcreate. A second dataset creation property list is then + * created by copying the first one with \ref H5Pcopy. + * + * \code + * dcplA_id = H5Pcreate (H5P_DATASET_CREATE); + * \endcode + * + * The new dataset creation property list is created as an instance of the property list class + * \ref H5P_DATASET_CREATE. + * + * The new dataset creation property list’s identifier is returned in dcplA_id and the property list is + * initialized with default dataset creation property values. + * + * A list of valid classes appears in the table \ref table_plist "Property list classes in HDF5". + * + * \code + * dcplB_id = H5Pcopy (dcplA_id); + * \endcode + * + * A new dataset creation property list, dcplB_id, is created as a copy of dcplA_id and is initialized + * with dataset creation property values currently in dcplA_id. + * + * At this point, dcplA_id and dcplB_id are identical; they will both contain any modified property + * values that were changed in dcplA_id before dcplB_id was created. They may, however, diverge + * as additional property values are reset in each. + * + * While we are creating property lists, let’s create a link creation property list; we will need this + * property list when the new dataset is linked into the file below: + * \code + * lcplAB_id = H5Pcreate (H5P_LINK_CREATE); + * \endcode + * + *

Change Property Values

This section describes how to set property values.

Later in this section, the dataset creation property lists dcplA_id and dcplB_id created in the section above will be used respectively to create chunked and contiguous datasets. To set this up, we must set the layout property in each property list. The following example sets dcplA_id for chunked datasets and dcplB_id for contiguous datasets:
\code
error = H5Pset_layout (dcplA_id, H5D_CHUNKED);
error = H5Pset_layout (dcplB_id, H5D_CONTIGUOUS);
\endcode

Since dcplA_id specifies a chunked layout, we must also set the number of dimensions and the size of the chunks. The example below specifies that datasets created with dcplA_id will be 3-dimensional and that the chunk size will be 100 in each dimension:
\code
hsize_t chunk_dims[3] = {100, 100, 100};

error = H5Pset_chunk (dcplA_id, 3, chunk_dims);
\endcode

These datasets will be created with UTF-8 encoded names. To accomplish that, the following example sets the character encoding property in the link creation property list to create link names with UTF-8 encoding:
\code
error = H5Pset_char_encoding (lcplAB_id, H5T_CSET_UTF8);
\endcode

dcplA_id can now be used to create chunked datasets and dcplB_id to create contiguous datasets. And with the use of lcplAB_id, they will be created with UTF-8 encoded names.

Use the Property List

Once the required property lists have been created, they can be used to control various HDF5 processes. For illustration, consider dataset creation.

Assume that the datatype dtypeAB and the dataspaces dspaceA and dspaceB have been defined and that the location identifier locAB_id specifies the group AB in the current HDF5 file. We have already created the required link creation and dataset creation property lists. For the sake of illustration, we assume that the default dataset access property list meets our application requirements. The following calls would create the datasets dsetA and dsetB in the group AB. The raw data in dsetA will be chunked while dsetB raw data will be contiguous; both datasets will have UTF-8 encoded link names:

\code
dsetA_id = H5Dcreate2( locAB_id, "dsetA", dtypeAB, dspaceA_id,
                       lcplAB_id, dcplA_id, H5P_DEFAULT );
dsetB_id = H5Dcreate2( locAB_id, "dsetB", dtypeAB, dspaceB_id,
                       lcplAB_id, dcplB_id, H5P_DEFAULT );
\endcode

Close the Property List

+ * + * Generally, creating or opening anything in an HDF5 file results in an HDF5 identifier. These + * identifiers are of HDF5 type hid_t and include things like file identifiers, often expressed as + * file_id; dataset identifiers, dset_id; and property list identifiers, plist_id. To reduce the risk of + * memory leaks, all of these identifiers must be closed once they are no longer needed. + * + * Property list identifiers are no exception to this rule, and \ref H5Pclose is used for this purpose. The + * calls immediately following would close the property lists created and used in the examples above. + * + * \code + * error = H5Pclose (dcplA_id); + * error = H5Pclose (dcplB_id); + * error = H5Pclose (lcplAB_id); + * \endcode + * + * \subsubsection subsubsec_plist_additional Additional Property List Operations + * + * A few property list operations fall outside of the programming model described above. This + * section describes those operations. + * + *

Query the Class of an Existing Property List

+ * + * Occasionally an application will have a property list but not know the corresponding property list + * class. A call such as in the following example will retrieve the unknown class of a known property list: + * \code + * PList_Class = H5Pget_class (dcplA_id); + * \endcode + * + * Upon this function’s return, PList_Class will contain the value \ref H5P_DATASET_CREATE indicating that + * dcplA_id is a dataset creation property list. + + *
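Because #H5Pget_class returns a class identifier rather than a named constant, a robust way to test the result is #H5Pequal, as in the sketch below.

\code
PList_Class = H5Pget_class (dcplA_id);
if (H5Pequal (PList_Class, H5P_DATASET_CREATE) > 0) {
    // dcplA_id is a dataset creation property list
}
\endcode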

Determine Current Creation Property List Settings in an Existing Object

After a file has been created, another application may work on the file without knowing how the creation properties for the file were set up. Retrieving these property values is often unnecessary; HDF5 can read the data and knows how to deal with any properties it encounters.

But sometimes an application must do something that requires knowing the creation property settings. HDF5 makes the acquisition of this information fairly straightforward; for each property setting call, H5Pset_*, there is a corresponding H5Pget_* call to retrieve the property’s current setting.

Consider the following examples which illustrate the determination of dataset layout and chunking settings:

The application must first identify the creation property list with the appropriate get creation property list call. There is one such call for each kind of object.

\ref H5Dget_create_plist will return a property list identifier for the creation property list that was used to create the dataset. Call it DCPL1_id.

\ref H5Pset_layout sets a dataset’s layout to be compact, contiguous, or chunked.

\ref H5Pget_layout called with DCPL1_id will return the dataset’s layout, either \ref H5D_COMPACT, \ref H5D_CONTIGUOUS, or \ref H5D_CHUNKED.

\ref H5Pset_chunk sets a dataset’s chunking, that is, the rank (number of dimensions) of the chunks and the size of each chunk dimension.

\ref H5Pget_chunk, also called with DCPL1_id, will return the chunk rank and the size of each chunk dimension.

If a creation property value has not been explicitly set, these H5Pget_* calls will return the property’s default value.
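A short sketch of that sequence, under the assumption that the dataset identifier dsetA_id from the earlier examples is still open, might look like the following.

\code
hid_t        DCPL1_id;
H5D_layout_t layout;
int          rank;
hsize_t      chunk_dims[3];
herr_t       status;

DCPL1_id = H5Dget_create_plist(dsetA_id);          /* creation property list used for dsetA             */
layout   = H5Pget_layout(DCPL1_id);                /* H5D_COMPACT, H5D_CONTIGUOUS, or H5D_CHUNKED        */
if (layout == H5D_CHUNKED)
    rank = H5Pget_chunk(DCPL1_id, 3, chunk_dims);  /* chunk rank and chunk dimensions                    */
status = H5Pclose(DCPL1_id);
\endcode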

Determine Access Property Settings

+ * + * Access property settings are quite different from creation properties. Since access property + * settings are not retained in an HDF5 file or object, there is normally no knowledge of the settings + * that were used in the past. On the other hand, since access properties do not affect characteristics + * of the file or object, this is not normally an issue. For more information, see "Access and + * Creation Property Exceptions." + * + * One circumstance under which an application might need to determine access property settings + * might be when a file or object is already open but the application does not know the property list + * settings. In that case, the application can use the appropriate get access property list + * call to retrieve a property list identifier. For example, if the dataset dsetA + * from the earlier examples is still open, the following call would return an identifier for the dataset + * access property list in use: + * \code + * dsetA_dacpl_id = H5Dget_access_plist( dsetA_id ); + * \endcode + * + * The application could then use the returned property list identifier to analyze the property settings + * + * \subsection subsec_plist_generic Generic Properties Interface and User-defined Properties + * + * HDF5’s generic property interface provides tools for managing the entire property hierarchy and + * for the creation and management of user-defined property lists and properties. This interface also + * makes it possible for an application or a driver to create, modify, and manage custom properties, + * property lists, and property list classes. A comprehensive list of functions for this interface + * appears under "Generic Property Operations (Advanced)" in the "H5P: Property List Interface" + * section of the \ref RM. + * + * Further discussion of HDF5’s generic property interface and user-defined properties and + * property lists is beyond the scope of this document. + * + * \subsection subsec_plist_H5P Property List Function Summaries + * + * General property functions, generic property functions and macros, property functions that are + * used with multiple types of objects, and object and link property functions are listed below. + * + * Property list functions that apply to a specific type of object are listed in the chapter that + * discusses that object. For example, the \ref sec_dataset chapter has two property list function listings: + * one for dataset creation property list functions and one for dataset access property list functions. + * As has been stated, this chapter is not intended to describe every property list function. + * + * \ref H5P reference manual + * + * \subsection subsec_plist_resources Additional Property List Resources + * Property lists are ubiquitous in an HDF5 environment and are therefore discussed in many places + * in HDF5 documentation. The following sections and listings in the \ref UG are of + * particular interest: + * \li In the \ref sec_data_model chapter, see \ref subsubsec_data_model_abstract_plist. + * \li In the \ref sec_file chapter, see the following sections and listings: + *
  • \ref subsec_file_creation_access
  • + *
  • \ref subsec_file_property_lists
  • + *
  • \ref subsubsec_file_examples_props
  • + *
  • \ref subsubsec_file_examples_access
  • + *
  • "File creation property list functions (H5P)"
  • + *
  • "File access property list functions (H5P)"
  • + *
  • "File driver functions (H5P)"
+ * \li In the \ref sec_attribute chapter, see "Attribute creation property list functions (H5P)". + * \li In the \ref sec_group chapter, see "Group creation property list functions (H5P)". + * \li Property lists are discussed throughout \ref sec_dataset. + * + * All property list functions are described in the \ref H5P section of the + * \ref RM. The function index at the top of the page provides a categorized listing + * grouped by property list class. Those classes are listed below: + * \li File creation properties + * \li File access properties + * \li Group creation properties + * \li Dataset creation properties + * \li Dataset access properties + * \li Dataset transfer properties + * \li Link creation properties + * \li Link access properties + * \li Object creation properties + * \li Object copy properties + * + * Additional categories not related to the class structure are as follows: + * \li General property list operations + * \li Generic property list functions + * + * The general property functions can be used with any property list; the generic property functions + * constitute an advanced feature. + * + * The in-memory file image feature of HDF5 uses property lists in a manner that differs + * substantially from their use elsewhere in HDF5. Those who plan to use in-memory file images + * must study "File Image Operations" (PDF) in the Advanced Topics in HDF5collection. + * + * \subsection subsec_plist_notes Notes + * + * \anchor FileMountProps

File Mount Properties

+ * + * While the file mount property list class \ref H5P_FILE_MOUNT is a valid HDF5 property list class, + * no file mount properties are defined by the HDF5 Library. References to a file mount property + * list should always be expressed as \ref H5P_DEFAULT, meaning the default file mount property list. + * + *

Access and Creation Property Exceptions

+ * + * There are a small number of exceptions to the rule that creation properties are always retained in + * a file or object and access properties are never retained. + * + * The following properties are file access properties but they are not transient; they have + * permanent and different effects on a file. They could be validly classified as file creation + * properties as they must be set at creation time to properly create the file. But they are access + * properties because they must also be set when a file is reopened to properly access the file. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
+ * <table>
+ * <tr><th>Property</th><th>Related function</th></tr>
+ * <tr><td>Family file driver</td><td>\ref H5Pset_fapl_family</td></tr>
+ * <tr><td>Split file driver</td><td>\ref H5Pset_fapl_split</td></tr>
+ * <tr><td>Core file driver</td><td>\ref H5Pset_fapl_core</td></tr>
+ * </table>
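+ *
+ * For illustration, the following is a minimal, hedged sketch of this behavior for the core (in-memory)
+ * driver; the identifier names and the 1 MiB increment are examples, not prescribed values. The same
+ * file access property list must be supplied both when the file is created and when it is reopened.
+ * \code
+ * hid_t fapl_id = H5Pcreate(H5P_FILE_ACCESS);
+ * // Select the core driver; memory grows in 1 MiB steps and is flushed to disk on close.
+ * H5Pset_fapl_core(fapl_id, (size_t)(1024 * 1024), 1);
+ * hid_t file_id = H5Fcreate("core_file.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ * // ... work with the file ...
+ * H5Fclose(file_id);
+ * // The driver choice is not recorded in the file, so the access property must be
+ * // set again to reopen the file through the same driver.
+ * file_id = H5Fopen("core_file.h5", H5F_ACC_RDWR, fapl_id);
+ * \endcode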
+ * + * The following is a link creation property, but it is not relevant after an object has been created + * and is not retained in the file or object. + * + * + * + * + * + * + *
+ * <table>
+ * <tr><th>Property</th><th>Related function</th></tr>
+ * <tr><td>Create missing intermediate groups</td><td>\ref H5Pset_create_intermediate_group</td></tr>
+ * </table>
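+ *
+ * As an illustration, the following hedged sketch sets this property on a link creation property list and
+ * then creates a dataset whose intermediate groups do not yet exist; the names \c file_id, \c space_id,
+ * and the path "/a/b/dset1" are assumed for the example only.
+ * \code
+ * hid_t lcpl_id = H5Pcreate(H5P_LINK_CREATE);
+ * // Ask the library to create any missing groups along the link path.
+ * H5Pset_create_intermediate_group(lcpl_id, 1);
+ * // The groups "/a" and "/a/b" are created as a side effect of creating the dataset.
+ * hid_t dset_id = H5Dcreate2(file_id, "/a/b/dset1", H5T_NATIVE_INT, space_id,
+ *                            lcpl_id, H5P_DEFAULT, H5P_DEFAULT);
+ * \endcode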
+ * + * Previous Chapter \ref sec_error - Next Chapter \ref sec_vol + * + * \defgroup H5P Property Lists (H5P) * * Use the functions in this module to manage HDF5 property lists and property * list classes. HDF5 property lists are the main vehicle to configure the @@ -61,135 +914,118 @@ * * * - * \defgroup ALCAPL Attribute and Link Creation Properties - * \ingroup H5P + * \defgroup STRCPL String Creation Properties * Currently, there are only two creation properties that you can use to control * the creation of HDF5 attributes and links. The first creation property, the * choice of a character encoding, applies to both attributes and links. * The second creation property applies to links only, and advises the library * to automatically create missing intermediate groups when creating new objects. + * \ingroup H5P * - * \defgroup DAPL Dataset Access Properties + * \defgroup LCPL Link Creation Properties + * The first creation property, the choice of a character encoding, applies to + * both attributes and links. + * The second creation property applies to links only, and advises the library + * to automatically create missing intermediate groups when creating new objects. + * \ingroup STRCPL + * + * @see STRCPL + * + * \defgroup ACPL Attribute Creation Properties + * The creation property, the choice of a character encoding, applies to attributes. + * \ingroup STRCPL + * + * @see STRCPL + * + * \defgroup LAPL Link Access Properties * \ingroup H5P + * + * \defgroup DAPL Dataset Access Properties * Use dataset access properties to modify the default behavior of the HDF5 * library when accessing datasets. The properties include adjusting the size * of the chunk cache, providing prefixes for external content and virtual * dataset file paths, and controlling flush behavior, etc. These properties * are \Emph{not} persisted with datasets, and can be adjusted at runtime before * a dataset is created or opened. + * \ingroup LAPL * * \defgroup DCPL Dataset Creation Properties - * \ingroup H5P * Use dataset creation properties to control aspects of dataset creation such * as fill time, storage layout, compression methods, etc. * Unlike dataset access and transfer properties, creation properties \Emph{are} * stored with the dataset, and cannot be changed once a dataset has been * created. + * \ingroup OCPL * * \defgroup DXPL Dataset Transfer Properties - * \ingroup H5P * Use dataset transfer properties to customize certain aspects of reading * and writing datasets such as transformations, MPI-IO I/O mode, error * detection, etc. These properties are \Emph{not} persisted with datasets, * and can be adjusted at runtime before a dataset is read or written. + * \ingroup H5P * * \defgroup FAPL File Access Properties - * \ingroup H5P * Use file access properties to modify the default behavior of the HDF5 * library when accessing files. The properties include selecting a virtual * file driver (VFD), configuring the metadata cache (MDC), control * file locking, etc. These properties are \Emph{not} persisted with files, and * can be adjusted at runtime before a file is created or opened. + * \ingroup H5P * * \defgroup FCPL File Creation Properties - * \ingroup H5P * Use file creation properties to control aspects of file creation such * as setting a file space management strategy or creating a user block. * Unlike file access properties, creation properties \Emph{are} * stored with the file, and cannot be changed once a file has been * created. 
+ * \ingroup GCPL * * \defgroup GAPL General Access Properties - * \ingroup H5P * The functions in this section can be applied to different kinds of property * lists. + * \ingroup LAPL * * \defgroup GCPL Group Creation Properties - * \ingroup H5P * Use group creation properties to control aspects of group creation such * as storage layout, compression, and link creation order tracking. * Unlike file access properties, creation properties \Emph{are} * stored with the group, and cannot be changed once a group has been * created. + * \ingroup OCPL * - * \defgroup GPLO General Property List Operations - * \ingroup H5P - * + * \defgroup PLCR Property List Class Root * Use the functions in this module to manage HDF5 property lists. - * - * - * - * - * - * - * - * - * - * - * - *
CreateRead
- * \snippet{lineno} H5P_examples.c create - * - * \snippet{lineno} H5P_examples.c read - *
UpdateDelete
- * \snippet{lineno} H5P_examples.c update - * - * \snippet{lineno} H5P_examples.c delete - *
- * - * \defgroup GPLOA General Property List Operations (Advanced) * \ingroup H5P * + * \defgroup PLCRA Property List Class Root (Advanced) * You can create and customize user-defined property list classes using the * functions described below. Arbitrary user-defined properties can also * be inserted into existing property lists as so-called temporary properties. - * - * - * - * - * - * - * - * - * - * - * - * - *
CreateRead
- * \snippet{lineno} H5P_examples.c create_class - * - * \snippet{lineno} H5P_examples.c read_class - *
UpdateDelete
- * \snippet{lineno} H5P_examples.c update_class - * - * \snippet{lineno} H5P_examples.c delete_class - *
- * - * \defgroup LAPL Link Access Properties * \ingroup H5P * * - * \defgroup MAPL Map Access Properties - * \ingroup H5P - * \defgroup OCPL Object Creation Properties * \ingroup H5P * + * \defgroup OCPYPL Object Copy Properties + * \ingroup H5P * - * \defgroup OCPPL Object Copy Properties + * \defgroup FMPL File Mount Properties + * Empty property class. * \ingroup H5P * * + * \defgroup TCPL Datatype Creation Properties + * TCPL isn't supported yet. + * \ingroup OCPL + * + * + * \defgroup TAPL Datatype Access Properties + * TAPL isn't supported yet. + * \ingroup LAPL + * + * + * */ #endif /* H5Pmodule_H */ diff --git a/src/H5Ppublic.h b/src/H5Ppublic.h index 211ac71..1f4503d 100644 --- a/src/H5Ppublic.h +++ b/src/H5Ppublic.h @@ -392,7 +392,7 @@ H5_DLLVAR hid_t H5P_CLS_LINK_ACCESS_ID_g; H5_DLLVAR hid_t H5P_CLS_VOL_INITIALIZE_ID_g; H5_DLLVAR hid_t H5P_CLS_REFERENCE_ACCESS_ID_g; -/* Default roperty list IDs */ +/* Default property list IDs */ /* (Internal to library, do not use! Use macros above) */ H5_DLLVAR hid_t H5P_LST_FILE_CREATE_ID_g; H5_DLLVAR hid_t H5P_LST_FILE_ACCESS_ID_g; @@ -421,7 +421,7 @@ H5_DLLVAR hid_t H5P_LST_REFERENCE_ACCESS_ID_g; /* Generic property list routines */ /** - * \ingroup GPLO + * \ingroup PLCR * * \brief Terminates access to a property list * @@ -439,7 +439,7 @@ H5_DLLVAR hid_t H5P_LST_REFERENCE_ACCESS_ID_g; */ H5_DLL herr_t H5Pclose(hid_t plist_id); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Closes an existing property list class * @@ -456,7 +456,7 @@ H5_DLL herr_t H5Pclose(hid_t plist_id); */ H5_DLL herr_t H5Pclose_class(hid_t plist_id); /** - * \ingroup GPLO + * \ingroup PLCR * * \brief Copies an existing property list to create a new property list * @@ -473,7 +473,7 @@ H5_DLL herr_t H5Pclose_class(hid_t plist_id); */ H5_DLL hid_t H5Pcopy(hid_t plist_id); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Copies a property from one list or class to another * @@ -509,7 +509,7 @@ H5_DLL hid_t H5Pcopy(hid_t plist_id); */ H5_DLL herr_t H5Pcopy_prop(hid_t dst_id, hid_t src_id, const char *name); /** - * \ingroup GPLO + * \ingroup PLCR * * \brief Creates a new property list as an instance of a property list class * @@ -633,7 +633,7 @@ H5_DLL herr_t H5Pcopy_prop(hid_t dst_id, hid_t src_id, const char *name); */ H5_DLL hid_t H5Pcreate(hid_t cls_id); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Creates a new property list class * @@ -676,7 +676,7 @@ H5_DLL hid_t H5Pcreate_class(hid_t parent, const char *name, H5P_cls_create_func H5P_cls_copy_func_t copy, void *copy_data, H5P_cls_close_func_t close, void *close_data); /** - * \ingroup GPLO + * \ingroup PLCR * * \brief Decodes property list received in a binary object buffer and * returns a new property list identifier @@ -705,7 +705,7 @@ H5_DLL hid_t H5Pcreate_class(hid_t parent, const char *name, H5P_cls_create_func */ H5_DLL hid_t H5Pdecode(const void *buf); /** - * \ingroup GPLO + * \ingroup PLCR * * \brief Encodes the property values in a property list into a binary * buffer @@ -759,7 +759,7 @@ H5_DLL hid_t H5Pdecode(const void *buf); */ H5_DLL herr_t H5Pencode2(hid_t plist_id, void *buf, size_t *nalloc, hid_t fapl_id); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Compares two property lists or classes for equality * @@ -779,7 +779,7 @@ H5_DLL herr_t H5Pencode2(hid_t plist_id, void *buf, size_t *nalloc, hid_t fapl_i */ H5_DLL htri_t H5Pequal(hid_t id1, hid_t id2); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Queries whether a property name exists in a property list or * class @@ 
-797,7 +797,7 @@ H5_DLL htri_t H5Pequal(hid_t id1, hid_t id2); */ H5_DLL htri_t H5Pexist(hid_t plist_id, const char *name); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Queries the value of a property * @@ -829,7 +829,7 @@ H5_DLL htri_t H5Pexist(hid_t plist_id, const char *name); */ H5_DLL herr_t H5Pget(hid_t plist_id, const char *name, void *value); /** - *\ingroup GPLO + *\ingroup PLCR * * \brief Returns the property list class identifier for a property list * @@ -892,7 +892,7 @@ H5_DLL herr_t H5Pget(hid_t plist_id, const char *name, void *value); */ H5_DLL hid_t H5Pget_class(hid_t plist_id); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Retrieves the name of a class * @@ -1036,7 +1036,7 @@ H5_DLL hid_t H5Pget_class(hid_t plist_id); */ H5_DLL char *H5Pget_class_name(hid_t pclass_id); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Retrieves the parent class of a property class * @@ -1052,7 +1052,7 @@ H5_DLL char *H5Pget_class_name(hid_t pclass_id); */ H5_DLL hid_t H5Pget_class_parent(hid_t pclass_id); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Queries the number of properties in a property list or class * @@ -1075,7 +1075,7 @@ H5_DLL hid_t H5Pget_class_parent(hid_t pclass_id); */ H5_DLL herr_t H5Pget_nprops(hid_t id, size_t *nprops); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Queries the size of a property value in bytes * @@ -1096,7 +1096,7 @@ H5_DLL herr_t H5Pget_nprops(hid_t id, size_t *nprops); */ H5_DLL herr_t H5Pget_size(hid_t id, const char *name, size_t *size); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Registers a temporary property with a property list * @@ -1346,7 +1346,7 @@ H5_DLL herr_t H5Pinsert2(hid_t plist_id, const char *name, size_t size, void *va H5P_prp_get_func_t get, H5P_prp_delete_func_t prp_del, H5P_prp_copy_func_t copy, H5P_prp_compare_func_t compare, H5P_prp_close_func_t close); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Determines whether a property list is a member of a class * @@ -1366,7 +1366,7 @@ H5_DLL herr_t H5Pinsert2(hid_t plist_id, const char *name, size_t size, void *va */ H5_DLL htri_t H5Pisa_class(hid_t plist_id, hid_t pclass_id); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Iterates over properties in a property class or list * @@ -1412,7 +1412,7 @@ H5_DLL htri_t H5Pisa_class(hid_t plist_id, hid_t pclass_id); */ H5_DLL int H5Piterate(hid_t id, int *idx, H5P_iterate_t iter_func, void *iter_data); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Registers a permanent property with a property list class * @@ -1693,7 +1693,7 @@ H5_DLL herr_t H5Pregister2(hid_t cls_id, const char *name, size_t size, void *de H5P_prp_delete_func_t prp_del, H5P_prp_copy_func_t copy, H5P_prp_compare_func_t compare, H5P_prp_close_func_t close); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Removes a property from a property list * @@ -1719,7 +1719,7 @@ H5_DLL herr_t H5Pregister2(hid_t cls_id, const char *name, size_t size, void *de */ H5_DLL herr_t H5Premove(hid_t plist_id, const char *name); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Sets a property list value * @@ -1751,7 +1751,7 @@ H5_DLL herr_t H5Premove(hid_t plist_id, const char *name); */ H5_DLL herr_t H5Pset(hid_t plist_id, const char *name, const void *value); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Removes a property from a property list class * @@ -1770,8 +1770,6 @@ H5_DLL herr_t H5Pset(hid_t plist_id, const char *name, const void *value); */ H5_DLL herr_t H5Punregister(hid_t pclass_id, const char *name); -/* 
Object creation property list (OCPL) routines */ - /** * \ingroup DCPL * @@ -1791,6 +1789,9 @@ H5_DLL herr_t H5Punregister(hid_t pclass_id, const char *name); * */ H5_DLL htri_t H5Pall_filters_avail(hid_t plist_id); + +/* Object creation property list (OCPL) routines */ + /** * \ingroup OCPL * @@ -8055,7 +8056,7 @@ H5_DLL herr_t H5Pget_mpio_no_collective_cause(hid_t plist_id, uint32_t *local_no /* Link creation property list (LCPL) routines */ /** - * \ingroup ALCAPL + * \ingroup STRCPL * * \brief Determines whether property is set to enable creating missing * intermediate groups @@ -8086,7 +8087,7 @@ H5_DLL herr_t H5Pget_mpio_no_collective_cause(hid_t plist_id, uint32_t *local_no */ H5_DLL herr_t H5Pget_create_intermediate_group(hid_t plist_id, unsigned *crt_intmd /*out*/); /** - * \ingroup ALCAPL + * \ingroup STRCPL * * \brief Specifies in property list whether to create missing * intermediate groups @@ -8470,7 +8471,7 @@ H5_DLL herr_t H5Pget_map_iterate_hints(hid_t mapl_id, size_t *key_prefetch_size /* String creation property list (STRCPL) routines */ /** - * \ingroup ALCAPL + * \ingroup STRCPL * * \brief Retrieves the character encoding used to create a link or * attribute name @@ -8499,7 +8500,7 @@ H5_DLL herr_t H5Pget_map_iterate_hints(hid_t mapl_id, size_t *key_prefetch_size */ H5_DLL herr_t H5Pget_char_encoding(hid_t plist_id, H5T_cset_t *encoding /*out*/); /** - * \ingroup ALCAPL + * \ingroup STRCPL * * \brief Sets the character encoding used to encode link and attribute * names @@ -8540,7 +8541,6 @@ H5_DLL herr_t H5Pget_char_encoding(hid_t plist_id, H5T_cset_t *encoding /*out*/) */ H5_DLL herr_t H5Pset_char_encoding(hid_t plist_id, H5T_cset_t encoding); -/* Link access property list (LAPL) routines */ /** * \ingroup LAPL * @@ -8899,7 +8899,7 @@ H5_DLL herr_t H5Pset_nlinks(hid_t plist_id, size_t nlinks); /* Object copy property list (OCPYPL) routines */ /** - * \ingroup OCPPL + * \ingroup OCPYPL * * \brief Adds a path to the list of paths that will be searched in the * destination file for a matching committed datatype @@ -9014,7 +9014,7 @@ H5_DLL herr_t H5Pset_nlinks(hid_t plist_id, size_t nlinks); */ H5_DLL herr_t H5Padd_merge_committed_dtype_path(hid_t plist_id, const char *path); /** - * \ingroup OCPPL + * \ingroup OCPYPL * * \brief Clears the list of paths stored in the object copy property list * @@ -9065,7 +9065,7 @@ H5_DLL herr_t H5Padd_merge_committed_dtype_path(hid_t plist_id, const char *path */ H5_DLL herr_t H5Pfree_merge_committed_dtype_paths(hid_t plist_id); /** - * \ingroup OCPPL + * \ingroup OCPYPL * * \brief Retrieves the properties to be used when an object is copied * @@ -9090,7 +9090,7 @@ H5_DLL herr_t H5Pfree_merge_committed_dtype_paths(hid_t plist_id); */ H5_DLL herr_t H5Pget_copy_object(hid_t plist_id, unsigned *copy_options /*out*/); /** - * \ingroup OCPPL + * \ingroup OCPYPL * * \brief Retrieves the callback function from the specified object copy * property list @@ -9128,7 +9128,7 @@ H5_DLL herr_t H5Pget_copy_object(hid_t plist_id, unsigned *copy_options /*out*/) */ H5_DLL herr_t H5Pget_mcdt_search_cb(hid_t plist_id, H5O_mcdt_search_cb_t *func, void **op_data); /** - * \ingroup OCPPL + * \ingroup OCPYPL * * \brief Sets properties to be used when an object is copied * @@ -9221,7 +9221,7 @@ H5_DLL herr_t H5Pget_mcdt_search_cb(hid_t plist_id, H5O_mcdt_search_cb_t *func, */ H5_DLL herr_t H5Pset_copy_object(hid_t plist_id, unsigned copy_options); /** - * \ingroup OCPPL + * \ingroup OCPYPL * * \brief Sets the callback function that H5Ocopy() will invoke before 
* searching the entire destination file for a matching committed @@ -9319,7 +9319,7 @@ H5_DLL herr_t H5Pset_mcdt_search_cb(hid_t plist_id, H5O_mcdt_search_cb_t func, v /* Typedefs */ /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Registers a permanent property with a property list class * @@ -9449,7 +9449,7 @@ H5_DLL herr_t H5Pregister1(hid_t cls_id, const char *name, size_t size, void *de H5P_prp_get_func_t prp_get, H5P_prp_delete_func_t prp_del, H5P_prp_copy_func_t prp_copy, H5P_prp_close_func_t prp_close); /** - * \ingroup GPLOA + * \ingroup PLCRA * * \brief Registers a temporary property with a property list * @@ -9561,7 +9561,7 @@ H5_DLL herr_t H5Pinsert1(hid_t plist_id, const char *name, size_t size, void *va H5P_prp_delete_func_t prp_delete, H5P_prp_copy_func_t prp_copy, H5P_prp_close_func_t prp_close); /** - * \ingroup GPLO + * \ingroup PLCRA * * \brief Encodes the property values in a property list into a binary * buffer diff --git a/src/H5Rmodule.h b/src/H5Rmodule.h index fe28bb2..c561058 100644 --- a/src/H5Rmodule.h +++ b/src/H5Rmodule.h @@ -26,33 +26,12 @@ #define H5_MY_PKG_INIT YES /** - * \defgroup H5R H5R + * \defgroup H5R References (H5R) * * Use the functions in this module to manage HDF5 references. Referents can * be HDF5 objects, attributes, and selections on datasets a.k.a. dataset * regions. * - * - * - * - * - * - * - * - * - * - * - * - *
CreateRead
- * \snippet{lineno} H5R_examples.c create - * - * \snippet{lineno} H5R_examples.c read - *
UpdateDelete
- * \snippet{lineno} H5R_examples.c update - * - * \snippet{lineno} H5R_examples.c delete - *
- * */ #endif /* H5Rmodule_H */ diff --git a/src/H5Smodule.h b/src/H5Smodule.h index 010f4a6..9c6682e 100644 --- a/src/H5Smodule.h +++ b/src/H5Smodule.h @@ -29,7 +29,1494 @@ #define H5_MY_PKG_ERR H5E_DATASPACE #define H5_MY_PKG_INIT YES -/**\defgroup H5S H5S +/** \page H5S_UG Dataspaces and Partial I/O + * + * + * \section sec_dataspace HDF5 Dataspaces and Partial I/O + * + * HDF5 dataspaces describe the \Emph{shape} of datasets in memory or in HDF5 + * files. Dataspaces can be empty (#H5S_NULL), a singleton (#H5S_SCALAR), or + * a multi-dimensional, regular grid (#H5S_SIMPLE). Dataspaces can be re-shaped. + * + * Subsets of dataspaces can be "book-marked" or used to restrict I/O operations + * using \Emph{selections}. Furthermore, certain set operations are supported + * for selections. + * + * \subsection subsec_dataspace_intro Introduction + * + * The HDF5 \Emph{dataspace} is a required component of an HDF5 dataset or attribute definition. The dataspace + * defines the size and shape of the dataset or attribute raw data. In other words, a dataspace defines the + * number of dimensions and the size of each dimension of the multidimensional array in which the raw data + * is represented. The dataspace must be defined when the dataset or attribute is created. + * + * The \Emph{dataspace} is also used during dataset I/O operations, defining the elements of the dataset that + * participate in the I/O operation. + * + * This chapter explains the \Emph{dataspace} object and its use in dataset and attribute creation and data + * transfer. It also describes selection operations on a dataspace used to implement sub‐setting, + * sub‐sampling, and scatter‐gather access to datasets. + * + * \subsection subsec_dataspace_function Dataspace Function Summaries + * @see H5S reference manual provides a reference list of dataspace functions, the H5S APIs. + * + * \subsection subsec_dataspace_program Definition of Dataspace Objects and the Dataspace Programming Model + * + * This section introduces the notion of the HDF5 dataspace object and a programming model for creating + * and working with dataspaces. + * + * \subsubsection subsubsec_dataspace_program_object Dataspace Objects + * + * An HDF5 dataspace is a required component of an HDF5 dataset or attribute. A dataspace defines the size + * and the shape of a dataset’s or an attribute’s raw data. Currently, HDF5 supports the following types of + * the dataspaces: + * \li Scalar dataspaces + * \li Simple dataspaces + * \li Null dataspaces + * + * A scalar dataspace, #H5S_SCALAR, represents just one element, a scalar. Note that the datatype of this one + * element may be very complex; example would be a compound structure with members being of any + * allowed HDF5 datatype, including multidimensional arrays, strings, and nested compound structures. By + * convention, the rank of a scalar dataspace is always 0 (zero); think of it geometrically as a single, + * dimensionless point, though that point may be complex. + * + * A simple dataspace, #H5S_SIMPLE , is a multidimensional array of elements. The dimensionality of the + * dataspace (or the rank of the array) is fixed and is defined at creation time. The size of each dimension + * can grow during the life time of the dataspace from the current size up to the maximum size. Both the + * current size and the maximum size are specified at creation time. The sizes of dimensions at any particular + * time in the life of a dataspace are called the current dimensions, or the dataspace extent. 
They can be + * queried along with the maximum sizes. + * + * A null dataspace, #H5S_NULL, contains no data elements. Note that no selections can be applied to a null + * dataset as there is nothing to select. + * + * As shown in the UML diagram in the figure below, an HDF5 simple dataspace object has three attributes: + * the rank or number of dimensions; the current sizes, expressed as an array of length rank with each element + * of the array denoting the current size of the corresponding dimension; and the maximum sizes, + * expressed as an array of length rank with each element of the array denoting the maximum size of the + * corresponding dimension. + * + * + * + * + * + *
+ * \image html Dspace_simple.gif "A simple dataspace" + *
+ * + * \em Note: A simple dataspace is defined by its rank, the current size of each dimension, and the maximum + * size of each dimension. + * + * The size of a current dimension cannot be greater than the maximum size, which can be unlimited, specified + * as #H5S_UNLIMITED. Note that while the HDF5 file format and library impose no maximum size on an + * unlimited dimension, practically speaking its size will always be limited to the biggest integer available + * on the particular system being used. + * + * Dataspace rank is restricted to 32, the standard limit in C on the rank of an array, in the current + * implementation of the HDF5 Library. The HDF5 file format, on the other hand, allows any rank up to the + * maximum integer value on the system, so the library restriction can be raised in the future if higher + * dimensionality is required. + * + * Note that most of the time Fortran applications calling HDF5 will work with dataspaces of rank less than + * or equal to seven, since seven is the maximum number of dimensions in a Fortran array. But dataspace rank + * is not limited to seven for Fortran applications. + * + * The current dimensions of a dataspace, also referred to as the dataspace extent, define the bounding box + * for dataset elements that can participate in I/O operations. + * + * \subsubsection subsubsec_dataspace_program_model Dataspace Programming Model + * + * The programming model for creating and working with HDF5 dataspaces can be summarized as follows: + * \li 1. Create a dataspace + * \li 2. Use the dataspace to create a dataset in the file or to describe a data array in memory + * \li 3. Modify the dataspace to define dataset elements that will participate in I/O operations + * \li 4. Use the modified dataspace while reading/writing dataset raw data or to create a region reference + * \li 5. Close the dataspace when no longer needed + * + * The rest of this section will address steps 1, 2, and 5 of the programming model; steps 3 and 4 will be + * discussed in later sections of this chapter. + * + *
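+ *
+ * The following is a compact, hedged sketch of these five steps; \c file_id is assumed to be an open file,
+ * and dataset creation itself is covered in \ref sec_dataset.
+ * \code
+ * hsize_t dims[2] = {20, 100};
+ * hid_t space_id = H5Screate_simple(2, dims, NULL);                    // 1. create a dataspace
+ * hid_t dset_id  = H5Dcreate2(file_id, "A", H5T_NATIVE_INT, space_id,  // 2. use it to create a dataset
+ *                             H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ * // 3. and 4. (optional) apply a selection to space_id and use it in H5Dread/H5Dwrite
+ * H5Dclose(dset_id);
+ * H5Sclose(space_id);                                                  // 5. close the dataspace when no longer needed
+ * \endcode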

Creating a Dataspace

+ * + * A dataspace can be created by calling the \ref H5Screate function. Since the + * definition of a simple dataspace requires the specification of dimensionality (or rank) and initial and + * maximum dimension sizes, the HDF5 Library provides a convenience API, \ref H5Screate_simple to create a + * simple dataspace in one step. + * + * The following examples illustrate the usage of these APIs. + * + *

Creating a Scalar Dataspace

+ * + * Creating a Scalar Dataspace + * \code + * hid_t space_id; + * . . . + * space_id = H5Screate(H5S_SCALAR); + * \endcode + * As mentioned above, the dataspace will contain only one element. Scalar dataspaces are used more often + * for describing attributes that have just one value. For example, the attribute temperature with the value + * Celsius is used to indicate that the dataset with this attribute stores temperature values using the + * Celsius scale. + * + *
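+ *
+ * As a hedged sketch of that use, the code below attaches a one-element string attribute named
+ * "temperature" with the value "Celsius" to an already open dataset; the identifier \c dset_id and the
+ * attribute name are assumptions made for this example.
+ * \code
+ * hid_t space_id = H5Screate(H5S_SCALAR);
+ * hid_t type_id  = H5Tcopy(H5T_C_S1);
+ * H5Tset_size(type_id, 8);   // room for "Celsius" plus the terminating NUL
+ * hid_t attr_id  = H5Acreate2(dset_id, "temperature", type_id, space_id, H5P_DEFAULT, H5P_DEFAULT);
+ * H5Awrite(attr_id, type_id, "Celsius");
+ * H5Aclose(attr_id);
+ * H5Tclose(type_id);
+ * H5Sclose(space_id);
+ * \endcode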

Creating a Null Dataspace

+ * + * A null dataspace is created with the \ref H5Screate function. + * \code + * hid_t space_id; + * . . . + * space_id = H5Screate(H5S_NULL); + * \endcode + * As mentioned above, the dataspace will contain no elements. + * + *
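+ *
+ * If it is not known which of the three dataspace classes an identifier refers to, the class can be
+ * queried; a minimal sketch, assuming \c space_id is a valid dataspace identifier:
+ * \code
+ * H5S_class_t space_class = H5Sget_simple_extent_type(space_id);
+ * if (space_class == H5S_NULL) {
+ *     // the dataspace holds no elements
+ * } else if (space_class == H5S_SCALAR) {
+ *     // the dataspace holds exactly one element
+ * } else if (space_class == H5S_SIMPLE) {
+ *     // the dataspace is a multidimensional array of elements
+ * }
+ * \endcode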

Creating a Simple Dataspace

+ *
+ * Let’s assume that an application wants to store a two‐dimensional array of data, A(20,100). During the
+ * life of the application, the first dimension of the array can grow up to 30; there is no restriction on
+ * the size of the second dimension. The following steps are used to declare a dataspace for the dataset
+ * in which the array data will be stored.
+ * \code
+ * hid_t space_id;
+ * int rank = 2;
+ * hsize_t current_dims[2] = {20, 100};
+ * hsize_t max_dims[2] = {30, H5S_UNLIMITED};
+ * . . .
+ * space_id = H5Screate(H5S_SIMPLE);
+ * H5Sset_extent_simple(space_id, rank, current_dims, max_dims);
+ * \endcode
+ *
+ * Alternatively, the convenience APIs H5Screate_simple/h5screate_simple_f can replace the
+ * H5Screate/h5screate_f and H5Sset_extent_simple/h5sset_extent_simple_f calls.
+ * \code
+ * space_id = H5Screate_simple(rank, current_dims, max_dims);
+ * \endcode
+ *
+ * In this example, a dataspace with current dimensions of 20 by 100 is created. The first dimension can be
+ * extended only up to 30. The second dimension, however, is declared unlimited; it can be extended up to
+ * the largest available integer value on the system.
+ *
+ * Note that when there is a difference between the current dimensions and the maximum dimensions of an
+ * array, then chunking storage must be used. In other words, if the number of dimensions may change over
+ * the life of the dataset, then chunking must be used. If the array dimensions are fixed (if the number of
+ * current dimensions is equal to the maximum number of dimensions when the dataset is created), then
+ * contiguous storage can be used. For more information, see "Data Transfer".
+ *
+ * Maximum dimensions can be the same as current dimensions. In such a case, the sizes of dimensions
+ * cannot be changed during the life of the dataspace object. In C, \c NULL can be used to indicate to the
+ * \ref H5Screate_simple and \ref H5Sset_extent_simple functions that the maximum sizes of all dimensions
+ * are the same as the current sizes.
+ * \code
+ * space_id = H5Screate_simple(rank, current_dims, NULL);
+ * \endcode
+ * The created dataspace will have current and maximum dimensions of 20 and 100 correspondingly, and the
+ * sizes of those dimensions cannot be changed.
+ *
+ *

C versus Fortran Dataspaces

+ * + * Dataspace dimensions are numbered from 1 to rank. HDF5 uses C storage conventions, assuming that the + * last listed dimension is the fastest‐changing dimension and the first‐listed dimension is the slowest + * changing. The HDF5 file format storage layout specification adheres to the C convention and the HDF5 + * Library adheres to the same convention when storing dataspace dimensions in the file. This affects how + * C programs and tools interpret data written from Fortran programs and vice versa. The example below + * illustrates the issue. + * + * When a Fortran application describes a dataspace to store an array as A(20,100), it specifies the value of + * the first dimension to be 20 and the second to be 100. Since Fortran stores data by columns, the + * first‐listed dimension with the value 20 is the fastest‐changing dimension and the last‐listed dimension + * with the value 100 is the slowest‐changing. In order to adhere to the HDF5 storage convention, the HDF5 + * Fortran wrapper transposes dimensions, so the first dimension becomes the last. The dataspace dimensions + * stored in the file will be 100,20 instead of 20,100 in order to correctly describe the Fortran data that + * is stored in 100 columns, each containing 20 elements. + * + * When a Fortran application reads the data back, the HDF5 Fortran wrapper transposes the dimensions + * once more, returning the first dimension to be 20 and the second to be 100, describing correctly the sizes + * of the array that should be used to read data in the Fortran array A(20,100). + * + * When a C application reads data back, the dimensions will come out as 100 and 20, correctly describing + * the size of the array to read data into, since the data was written as 100 records of 20 elements each. + * Therefore C tools such as h5dump and h5ls always display transposed dimensions and values for the data + * written by a Fortran application. + * + * Consider the following simple example of equivalent C 3 x 5 and Fortran 5 x 3 arrays. As illustrated in + * the figure below, a C application will store a 3 x 5 2‐dimensional array as three 5‐element rows. In order + * to store the same data in the same order, a Fortran application must view the array as a 5 x 3 array with + * three 5‐element columns. The dataspace of this dataset, as written from Fortran, will therefore be + * described as 5 x 3 in the application but stored and described in the file according to the C convention + * as a 3 x 5 array. This ensures that C and Fortran applications will always read the data in the order in + * which it was written. The HDF5 Fortran interface handles this transposition automatically. + * \code + * // C + * \#define NX 3 // dataset dimensions + * \#define NY 5 + * . . . + * int data[NX][NY]; // data to write + * . . . + * // Data and output buffer initialization. + * for (j = 0; j < NX; j++) + * for (i = 0; i < NY; i++) + * data[j][i] = i + j; + * // + * // 1 2 3 4 5 + * // 6 7 8 9 10 + * // 11 12 13 14 15 + * // + * . . . + * dims[0] = NX; + * dims[1] = NY; + * dataspace = H5Screate_simple(RANK, dims, NULL); + * \endcode + * + * \code + * ! Fortran + * INTEGER, PARAMETER :: NX = 3 + * INTEGER, PARAMETER :: NX = 5 + * . . . + * INTEGER(HSIZE_T), DIMENSION(2) :: dims = (/NY, NX/) ! Dataset dimensions + * . . . + * ! + * ! Initialize data + * ! + * do i = 1, NY + * do j = 1, NX + * data(i,j) = i + (j-1)*NY + * enddo + * enddo + * ! + * ! Data + * ! + * ! 1 6 11 + * ! 2 7 12 + * ! 3 8 13 + * ! 4 9 14 + * ! 5 10 15 + * . . . 
+ * CALL h5screate_simple_f(rank, dims, dspace_id, error) + * \endcode + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Comparing C and Fortran dataspaces
+ * A dataset stored by a C program in a 3 x 5 array: + *
+ * \image html Dspace_CvsF1.gif + *
+ * The same dataset stored by a Fortran program in a 5 x 3 array: + *
+ * \image html Dspace_CvsF2.gif + *
+ * The first dataset above as written to an HDF5 file from C or the second dataset above as written + * from Fortran: + *
+ * \image html Dspace_CvsF3.gif + *
+ * The first dataset above as written to an HDF5 file from Fortran: + *
+ * \image html Dspace_CvsF4.gif + *
+ * + * Note: The HDF5 Library stores arrays along the fastest‐changing dimension. This approach is often + * referred to as being “in C order.” C, C++, and Java work with arrays in row‐major order. In other words, + * the row, or the last dimension, is the fastest‐changing dimension. Fortran, on the other hand, handles + * arrays in column‐major order making the column, or the first dimension, the fastest‐changing dimension. + * Therefore, the C and Fortran arrays illustrated in the top portion of this figure are stored identically + * in an HDF5 file. This ensures that data written by any language can be meaningfully read, interpreted, + * and manipulated by any other. + * + *

Finding Dataspace Characteristics

+ *
+ * The HDF5 Library provides several APIs designed to query the characteristics of a dataspace.
+ *
+ * The function \ref H5Sis_simple returns information about the type of a dataspace.
+ * This function is rarely used and currently supports only simple and scalar dataspaces.
+ *
+ * To find out the dimensionality, or rank, of a dataspace, use \ref H5Sget_simple_extent_ndims.
+ * \ref H5Sget_simple_extent_dims can also be used to find out the rank. See
+ * the example below. If both functions return 0 for the value of rank, then the dataspace is scalar.
+ *
+ * To query the sizes of the current and maximum dimensions, use \ref H5Sget_simple_extent_dims.
+ *
+ * The following example illustrates querying the rank and dimensions of a dataspace using these functions.
+ * \code
+ * hid_t space_id;
+ * int rank;
+ * hsize_t *current_dims;
+ * hsize_t *max_dims;
+ * . . .
+ * rank = H5Sget_simple_extent_ndims(space_id);
+ * // (or rank = H5Sget_simple_extent_dims(space_id, NULL, NULL);)
+ * current_dims = (hsize_t *)malloc(rank * sizeof(hsize_t));
+ * max_dims = (hsize_t *)malloc(rank * sizeof(hsize_t));
+ * H5Sget_simple_extent_dims(space_id, current_dims, max_dims);
+ * // Print values here
+ * \endcode
+ *
+ * \subsection subsec_dataspace_transfer Dataspaces and Data Transfer
+ *
+ * Read and write operations transfer data between an HDF5 file on disk and in memory. The shape that the
+ * array data takes in the file and in memory may be the same, but HDF5 also allows users to
+ * represent data in memory in a different shape than in the file. If the shape of an array in the file and
+ * in memory will be the same, then the same dataspace definition can be used for both. If the shape of an
+ * array in memory needs to be different than the shape in the file, then the dataspace definition for the
+ * shape of the array in memory can be changed. During a read operation, the array will be read into the
+ * different shape in memory, and during a write operation, the array will be written to the file in the
+ * shape specified by the dataspace in the file. The only qualification is that the number of elements read
+ * or written must be the same in both the source and the destination dataspaces.
+ *
+ * Item a in the figure below shows a simple example of a read operation in which the data is stored as a 3
+ * by 4 array in the file (item b) on disk, but the program wants it to be a 4 by 3 array in memory. This is
+ * accomplished by setting the memory dataspace to describe the desired memory layout, as in item c. The read
+ * operation reads the data in the file array into the memory array.
+ *
+ * \image html Dspace_read.gif "Data layout before and after a read operation" + *
+ * + * + * + * + * + *
+ * \image html Dspace_move.gif "Moving data from disk to memory" + *
+ + * Both the source and destination are stored as contiguous blocks of storage with the elements in the order + * specified by the dataspace. The figure above shows one way the elements might be organized. In item a, + * the elements are stored as 3 blocks of 4 elements. The destination is an array of 12 elements in memory + * (see item c). As the figure suggests, the transfer reads the disk blocks into a memory buffer (see item b), + * and then writes the elements to the correct locations in memory. A similar process occurs in reverse when + * data is written to disk. + * + * \subsubsection subsubsec_dataspace_transfer_select Data Selection + * + * In addition to rearranging data, the transfer may select the data elements from the source and destination. + * + * Data selection is implemented by creating a dataspace object that describes the selected elements (within + * the hyper rectangle) rather than the whole array. Two dataspace objects with selections can be used in + * data transfers to read selected elements from the source and write selected elements to the destination. + * When data is transferred using the dataspace object, only the selected elements will be transferred. + * + * This can be used to implement partial I/O, including: + * \li Sub‐setting ‐ reading part of a large dataset + * \li Sampling ‐ reading selected elements (for example, every second element) of a dataset + * \li Scatter‐gather ‐ read non‐contiguous elements into contiguous locations (gather) or read contiguous + * elements into non‐contiguous locations (scatter) or both + * + * To use selections, the following steps are followed: + * \li 1. Get or define the dataspace for the source and destination + * \li 2. Specify one or more selections for source and destination dataspaces + * \li 3. Transfer data using the dataspaces with selections + * + * A selection is created by applying one or more selections to a dataspace. A selection may override any + * other selections (#H5S_SELECT_SET) or may be “Ored” with previous selections on the same dataspace + * (#H5S_SELECT_OR). In the latter case, the resulting selection is the union of the selection and all + * previously selected selections. Arbitrary sets of points from a dataspace can be selected by specifying + * an appropriate set of selections. + * + * Two selections are used in data transfer, so the source and destination must be compatible, as described + * below. + * + * There are two forms of selection, hyperslab and point. A selection must be either a point selection or a + * set of hyperslab selections. Selections cannot be mixed. + * + * The definition of a selection within a dataspace, not the data in the selection, cannot be saved to the + * file unless the selection definition is saved as a region reference. For more information, + * see \ref subsec_dataspace_refer. + * + *

Hyperslab Selection

+ *
+ * A hyperslab is a selection of elements from a hyper rectangle. An HDF5 hyperslab is a rectangular pattern
+ * defined by four arrays. The four arrays are summarized in the table below.
+ *
+ * The offset defines the origin of the hyperslab in the original dataspace.
+ *
+ * The stride is the number of elements to increment between selected elements. A stride of ‘1’ is every
+ * element, a stride of ‘2’ is every second element, etc. Note that there may be a different stride for
+ * each dimension of the dataspace. The default stride is 1.
+ *
+ * The count is the number of elements in the hyperslab selection. When the stride is 1, the selection is a
+ * hyper rectangle with a corner at the offset and size count[0] by count[1] by.... When stride is greater
+ * than one, the hyperslab is bounded by the offset and the corners defined by stride[n] * count[n].
+ *
+ * <table>
+ * <caption>Hyperslab elements</caption>
+ * <tr><th>Parameter</th><th>Description</th></tr>
+ * <tr><td>Offset</td><td>The starting location for the hyperslab.</td></tr>
+ * <tr><td>Stride</td><td>The number of elements to separate each element or block to be selected.</td></tr>
+ * <tr><td>Count</td><td>The number of elements or blocks to select along each dimension.</td></tr>
+ * <tr><td>Block</td><td>The size of the block selected from the dataspace.</td></tr>
+ * </table>
+ * + * The block is a count on the number of repetitions of the hyperslab. The default block size is '1', which is + * one hyperslab. A block of 2 would be two hyperslabs in that dimension, with the second starting at + * offset[n] + (count[n] * stride[n]) + 1. + * + * A hyperslab can be used to access a sub‐set of a large dataset. The figure below shows an example of a + * hyperslab that reads a rectangle from the middle of a larger two dimensional array. The destination is the + * same shape as the source. + * + * + * + * + * + *
+ * \image html Dspace_subset.gif "Access a sub‐set of data with a hyperslab" + *
+ * + * Hyperslabs can be combined to select complex regions of the source and destination. The figure below + * shows an example of a transfer from one non‐rectangular region into another non‐rectangular region. The + * source is defined as the union of two hyperslabs, and the destination is the union of three hyperslabs. + * + * + * + * + * + *
+ * \image html Dspace_complex.gif "Build complex regions with hyperslab unions" + *
+ * + * Hyperslabs may also be used to collect or scatter data from regular patterns. The figure below shows an + * example where the source is a repeating pattern of blocks, and the destination is a single, one dimensional + * array. + * + * + * + * + * + *
+ * \image html Dspace_combine.gif "Use hyperslabs to combine or disperse data" + *
+ * + *
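+ *
+ * To make the four hyperslab parameters concrete, here is a minimal, hedged sketch that selects every
+ * second element of a one dimensional dataspace of 10 elements; \c space_id is assumed to be that
+ * dataspace.
+ * \code
+ * hsize_t offset[1] = {0};   // start at the first element
+ * hsize_t stride[1] = {2};   // step over every other element
+ * hsize_t count[1]  = {5};   // five blocks are selected
+ * hsize_t block[1]  = {1};   // each block is a single element (the default)
+ * herr_t  status    = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, offset, stride, count, block);
+ * // elements 0, 2, 4, 6, and 8 are now selected
+ * \endcode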

Select Points

+ * + * The second type of selection is an array of points such as coordinates. Essentially, this selection is a + * list of all the points to include. The figure below shows an example of a transfer of seven elements from + * a two dimensional dataspace to a three dimensional dataspace using a point selection to specify the points. + * + * + * + * + * + *
+ * \image html Dspace_point.gif "Point selection" + *
+ * + *

Rules for Defining Selections

+ *
+ * A selection must have the same number of dimensions (rank) as the dataspace it is applied to, although it
+ * may select from only a small region such as a plane from a 3D dataspace. Selections do not affect the
+ * extent of the dataspace; the selection may be larger than the dataspace. The boundaries of selections are
+ * reconciled with the extent at the time of the data transfer.
+ *
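+ *
+ * A selection that extends outside the current extent can therefore be defined, but before transferring
+ * data an application can check whether it fits; a minimal sketch, assuming \c space_id carries the
+ * selection:
+ * \code
+ * htri_t ok = H5Sselect_valid(space_id);
+ * if (ok > 0) {
+ *     // the selection lies within the current extent; safe to use in H5Dread/H5Dwrite
+ * } else if (ok == 0) {
+ *     // the selection extends outside the extent; extend the dataset or adjust the selection first
+ * }
+ * \endcode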

Data Transfer with Selections

+ *
+ * A data transfer (read or write) with selections is the same as any read or write, except that the source
+ * and destination dataspaces have compatible selections.
+ *
+ * During the data transfer, the following steps are executed by the library:
+ * \li The source and destination dataspaces are checked to assure that the selections are compatible.
+ * <ul>
+ * <li>Each selection must be within the current extent of the dataspace. A selection may be
+ * defined to extend outside the current extent of the dataspace, but the dataspace cannot be
+ * accessed if the selection is not valid at the time of the access.</li>
+ * <li>The total number of points selected in the source and destination must be the same. Note
+ * that the dimensionality of the source and destination can be different (for example, the
+ * source could be 2D, the destination 1D or 3D), and the shape can be different, but the number of
+ * elements selected must be the same.</li>
+ * </ul>
+ * \li The data is transferred, element by element. + * + * Selections have an iteration order for the points selected, which can be any permutation of the dimensions + * involved (defaulting to 'C' array order) or a specific order for the selected points, for selections + * composed of single array elements with \ref H5Sselect_elements. + * + * The elements of the selections are transferred in row‐major, or C order. That is, it is assumed that the + * first dimension varies slowest, the second next slowest, and so forth. For hyperslab selections, the order + * can be any permutation of the dimensions involved (defaulting to ‘C’ array order). When multiple hyperslabs + * are combined, the hyperslabs are coalesced into contiguous reads and writes. + * + * In the case of point selections, the points are read and written in the order specified. + * + * \subsubsection subsubsec_dataspace_transfer_model Programming Model + * + *

Selecting Hyperslabs

+ * + * Suppose we want to read a 3x4 hyperslab from a dataset in a file beginning at the element <1,2> in the + * dataset, and read it into a 7 x 7 x 3 array in memory. See the figure below. In order to do this, we must + * create a dataspace that describes the overall rank and dimensions of the dataset in the file as well as + * the position and size of the hyperslab that we are extracting from that dataset. + * + * + * + * + * + *
+ * \image html Dspace_select.gif "Selecting a hyperslab" + *
+ * + * The code in the first example below illustrates the selection of the hyperslab in the file dataspace. + * The second example below shows the definition of the destination dataspace in memory. Since the in‐memory + * dataspace has three dimensions, the hyperslab is an array with three dimensions with the last dimension + * being 1: <3,4,1>. The third example below shows the read using the source and destination dataspaces + * with selections. + * + * Selecting a hyperslab + * \code + * //get the file dataspace. + * dataspace = H5Dget_space(dataset); // dataspace identifier + * + * // Define hyperslab in the dataset. + * offset[0] = 1; + * offset[1] = 2; + * count[0] = 3; + * count[1] = 4; + * status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, offset, NULL, count, NULL); + * \endcode + * + * Defining the destination memory + * \code + * // Define memory dataspace. + * dimsm[0] = 7; + * dimsm[1] = 7; + * dimsm[2] = 3; + * memspace = H5Screate_simple(3,dimsm,NULL); + * + * // Define memory hyperslab. + * offset_out[0] = 3; + * offset_out[1] = 0; + * offset_out[2] = 0; + * count_out[0] = 3; + * count_out[1] = 4; + * count_out[2] = 1; + * status = H5Sselect_hyperslab(memspace, H5S_SELECT_SET, offset_out, NULL, count_out, NULL); + * \endcode + * + * A sample read specifying source and destination dataspaces + * \code + * ret = H5Dread(dataset, H5T_NATIVE_INT, memspace,dataspace, H5P_DEFAULT, data); + * \endcode + * + *
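+ *
+ * Before issuing the read, the two selections can be checked for the same number of elements, as required
+ * for any transfer with selections; a hedged sketch using the \c dataspace and \c memspace identifiers
+ * from the examples above:
+ * \code
+ * // Both selections must cover the same number of elements (3 x 4 = 12 here).
+ * hssize_t n_src = H5Sget_select_npoints(dataspace);
+ * hssize_t n_dst = H5Sget_select_npoints(memspace);
+ * if (n_src != n_dst) {
+ *     // mismatched selections would cause the transfer to fail
+ * }
+ * \endcode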

Example with Strides and Blocks

+ * + * Consider an 8 x 12 dataspace into which we want to write eight 3 x 2 blocks in a two dimensional array + * from a source dataspace in memory that is a 50‐element one dimensional array. See the figure below. + * + * + * + * + * + *
+ * \image html Dspace_write1to2.gif "Write from a one dimensional array to a two dimensional array" + *
+ *
+ * The example below shows code to write 48 elements from the one dimensional array to the file dataset
+ * starting with the second element in vector. The destination hyperslab has the following parameters:
+ * offset=(0,1), stride=(4,3), count=(2,4), block=(3,2). The source has the parameters: offset=(1),
+ * stride=(1), count=(48), block=(1). After these operations, the file dataspace will have the values
+ * shown in item b in the figure above. Notice that the values are inserted in the file dataset in
+ * row‐major order.
+ *
+ * Write from a one dimensional array to a two dimensional array
+ * \code
+ * // Select hyperslab for the dataset in the file, using 3 x 2 blocks, (4,3) stride (2,4)
+ * // count starting at the position (0,1).
+ * offset[0] = 0; offset[1] = 1;
+ * stride[0] = 4; stride[1] = 3;
+ * count[0] = 2; count[1] = 4;
+ * block[0] = 3; block[1] = 2;
+ * ret = H5Sselect_hyperslab(fid, H5S_SELECT_SET, offset, stride, count, block);
+ *
+ * // Create dataspace for the first dataset.
+ * mid1 = H5Screate_simple(MSPACE1_RANK, dim1, NULL);
+ *
+ * // Select hyperslab.
+ * // We will use 48 elements of the vector buffer starting
+ * // at the second element. Selected elements are
+ * // 1 2 3 . . . 48
+ * offset[0] = 1;
+ * stride[0] = 1;
+ * count[0] = 48;
+ * block[0] = 1;
+ * ret = H5Sselect_hyperslab(mid1, H5S_SELECT_SET, offset, stride, count, block);
+ *
+ * // Write selection from the vector buffer to the dataset in the file.
+ * ret = H5Dwrite(dataset, H5T_NATIVE_INT, mid1, fid, H5P_DEFAULT, vector);
+ * \endcode
+ *
+ *

Selecting a Union of Hyperslabs

+ * + * The HDF5 Library allows the user to select a union of hyperslabs and write or read the selection into + * another selection. The shapes of the two selections may differ, but the number of elements must be + * equal. + * + * + * + * + * + *
+ * \image html Dspace_transfer.gif "Transferring hyperslab unions" + *
+ * + * The figure above shows the transfer of a selection that is two overlapping hyperslabs from the dataset + * into a union of hyperslabs in the memory dataset. Note that the destination dataset has a different shape + * from the source dataset. Similarly, the selection in the memory dataset could have a different shape than + * the selected union of hyperslabs in the original file. For simplicity, the selection is that same shape + * at the destination. + * + * To implement this transfer, it is necessary to: + * \li 1. Get the source dataspace + * \li 2. Define one hyperslab selection for the source + * \li 3. Define a second hyperslab selection, unioned with the first + * \li 4. Get the destination dataspace + * \li 5. Define one hyperslab selection for the destination + * \li 6. Define a second hyperslab selection, unioned with the first + * \li 7. Execute the data transfer (H5Dread or H5Dwrite) using the source and destination dataspaces + * + * The example below shows example code to create the selections for the source dataspace (the file). The + * first hyperslab is size 3 x 4 and the left upper corner at the position (1,2). The hyperslab is a simple + * rectangle, so the stride and block are 1. The second hyperslab is 6 x 5 at the position (2,4). The second + * selection is a union with the first hyperslab (#H5S_SELECT_OR). + * + * Select source hyperslabs + * \code + * fid = H5Dget_space(dataset); + * + * // Select first hyperslab for the dataset in the file. + * offset[0] = 1; offset[1] = 2; + * block[0] = 1; block[1] = 1; + * stride[0] = 1; stride[1] = 1; + * count[0] = 3; count[1] = 4; + * ret = H5Sselect_hyperslab(fid, H5S_SELECT_SET, offset, stride, count, block); + * + * // Add second selected hyperslab to the selection. + * offset[0] = 2; offset[1] = 4; + * block[0] = 1; block[1] = 1; + * stride[0] = 1; stride[1] = 1; + * count[0] = 6; count[1] = 5; + * ret = H5Sselect_hyperslab(fid, H5S_SELECT_OR, offset, stride, count, block); + * \endcode + * + * The example below shows example code to create the selection for the destination in memory. The steps + * are similar. In this example, the hyperslabs are the same shape, but located in different positions in the + * dataspace. The first hyperslab is 3 x 4 and starts at (0,0), and the second is 6 x 5 and starts at (1,2). + * Finally, the H5Dread call transfers the selected data from the file dataspace to the selection in memory. + * In this example, the source and destination selections are two overlapping rectangles. In general, any + * number of rectangles can be OR’ed, and they do not have to be contiguous. The order of the selections + * does not matter, but the first should use #H5S_SELECT_SET ; subsequent selections are unioned using + * #H5S_SELECT_OR. + * + * It is important to emphasize that the source and destination do not have to be the same shape (or number + * of rectangles). As long as the two selections have the same number of elements, the data can be + * transferred. + * + * Select destination hyperslabs + * \code + * // Create memory dataspace. + * mid = H5Screate_simple(MSPACE_RANK, mdim, NULL); + * + * // Select two hyperslabs in memory. Hyperslabs has the + * // same size and shape as the selected hyperslabs for + * // the file dataspace. 
+ * offset[0] = 0; offset[1] = 0; + * block[0] = 1; block[1] = 1; + * stride[0] = 1; stride[1] = 1; + * count[0] = 3; count[1] = 4; + * ret = H5Sselect_hyperslab(mid, H5S_SELECT_SET, offset, stride, count, block); + * + * offset[0] = 1; offset[1] = 2; + * block[0] = 1; block[1] = 1; + * stride[0] = 1; stride[1] = 1; + * count[0] = 6; count[1] = 5; + * ret = H5Sselect_hyperslab(mid, H5S_SELECT_OR, offset, stride, count, block); + * + * ret = H5Dread(dataset, H5T_NATIVE_INT, mid, fid, H5P_DEFAULT, matrix_out); + * \endcode + * + *

Selecting a List of Independent Points

+ * + * It is also possible to specify a list of elements to read or write using the function H5Sselect_elements. + * + * The procedure is similar to hyperslab selections. + * \li 1. Get the source dataspace + * \li 2. Set the selected points + * \li 3. Get the destination dataspace + * \li 4. Set the selected points + * \li 5. Transfer the data using the source and destination dataspaces + * + * The figure below shows an example where four values are to be written to four separate points in a two + * dimensional dataspace. The source dataspace is a one dimensional array with the values 53, 59, 61, 67. + * The destination dataspace is an 8 x 12 array. The elements are to be written to the points + * (0,0), (3,3), (3,5), and (5,6). In this example, the source does not require a selection. The example + * below the figure shows example code to implement this transfer. + * + * A point selection lists the exact points to be transferred and the order they will be transferred. The + * source and destination are required to have the same number of elements. A point selection can be used + * with a hyperslab (for example, the source could be a point selection and the destination a hyperslab, + * or vice versa), so long as the number of elements selected are the same. + * + * + * + * + * + *
+ * \image html Dspace_separate.gif "Write data to separate points" + *
+ * + * Write data to separate points + * \code + * hsize_t dim2[] = {4}; + * int values[] = {53, 59, 61, 67}; + * + * // file dataspace + * hssize_t coord[4][2]; + * + * // Create dataspace for the second dataset. + * mid2 = H5Screate_simple(1, dim2, NULL); + * + * // Select sequence of NPOINTS points in the file dataspace. + * coord[0][0] = 0; coord[0][1] = 0; + * coord[1][0] = 3; coord[1][1] = 3; + * coord[2][0] = 3; coord[2][1] = 5; + * coord[3][0] = 5; coord[3][1] = 6; + * + * ret = H5Sselect_elements(fid, H5S_SELECT_SET, NPOINTS, (const hssize_t **)coord); + * + * ret = H5Dwrite(dataset, H5T_NATIVE_INT, mid2, fid, H5P_DEFAULT, values); + * \endcode + * + *

Combinations of Selections

+ *
+ * Selections are a very flexible mechanism for reorganizing data during a data transfer. With different
+ * combinations of dataspaces and selections, it is possible to implement many kinds of data transfers
+ * including sub‐setting, sampling, and reorganizing the data. The table below gives some example combinations
+ * of source and destination, and the operations they implement.
+ *
+ * <table>
+ * <caption>Selection operations</caption>
+ * <tr><th>Source</th><th>Destination</th><th>Operation</th></tr>
+ * <tr><td>All</td><td>All</td><td>Copy whole array</td></tr>
+ * <tr><td>All</td><td>All (different shape)</td><td>Copy and reorganize array</td></tr>
+ * <tr><td>Hyperslab</td><td>All</td><td>Sub-set</td></tr>
+ * <tr><td>Hyperslab</td><td>Hyperslab (same shape)</td><td>Selection</td></tr>
+ * <tr><td>Hyperslab</td><td>Hyperslab (different shape)</td><td>Select and rearrange</td></tr>
+ * <tr><td>Hyperslab with stride or block</td><td>All or hyperslab with stride 1</td><td>Sub-sample, scatter</td></tr>
+ * <tr><td>Hyperslab</td><td>Points</td><td>Scatter</td></tr>
+ * <tr><td>Points</td><td>Hyperslab or all</td><td>Gather</td></tr>
+ * <tr><td>Points</td><td>Points (same)</td><td>Selection</td></tr>
+ * <tr><td>Points</td><td>Points (different)</td><td>Reorder points</td></tr>
+ * </table>
+ * + * \subsection subsec_dataspace_select Dataspace Selection Operations and Data Transfer + * + * This section is under construction. + * + * \subsection subsec_dataspace_refer References to Dataset Regions + * + * Another use of selections is to store a reference to a region of a dataset. An HDF5 object reference + * object is a pointer to an object (dataset, group, or committed datatype) in the file. A selection can + * be used to create a pointer to a set of selected elements of a dataset, called a region reference. The + * selection can be either a point selection or a hyperslab selection. + * + * A region reference is an object maintained by the HDF5 Library. The region reference can be stored in a + * dataset or attribute, and then read. The dataset or attribute is defined to have the special datatype, + * #H5T_STD_REF_DSETREG. + * + * To discover the elements and/or read the data, the region reference can be dereferenced. The + * #H5Rdereference call returns an identifier for the dataset, and then the selected dataspace can be + * retrieved with a call to #H5Rget_region(). The selected dataspace can be used to read the selected data + * elements. + * + * For more information, \see subsubsec_datatype_other_refs. + * + * \subsubsection subsubsec_dataspace_refer_use Example Uses for Region References + * + * Region references are used to implement stored pointers to data within a dataset. For example, features + * in a large dataset might be indexed by a table. See the figure below. This table could be stored as an + * HDF5 dataset with a compound datatype, for example, with a field for the name of the feature and a region + * reference to point to the feature in the dataset. See the second figure below. + * + * + * + * + * + *
+ * \image html Dspace_features.gif " Features indexed by a table" + *
+ * + * + * + * + * + *
+ * \image html Dspace_features_cmpd.gif "Storing the table with a compound datatype" + *
+ * + * + * \subsubsection subsubsec_dataspace_refer_create Creating References to Regions + * + * To create a region reference: + * \li 1. Create or open the dataset that contains the region + * \li 2. Get the dataspace for the dataset + * \li 3. Define a selection that specifies the region + * \li 4. Create a region reference using the dataset and dataspace with selection + * \li 5. Write the region reference(s) to the desired dataset or attribute + * + * The figure below shows a diagram of a file with three datasets. Dataset D1 and D2 are two dimensional + * arrays of integers. Dataset R1 is a one dimensional array of references to regions in D1 and D2. The + * regions can be any valid selection of the dataspace of the target dataset. + * + * + * + * + *
+ * \image html Dspace_three_datasets.gif "A file with three datasets" + *
+ * Note: In the figure above, R1 is a 1 D array of region pointers; each pointer refers to a selection + * in one dataset. + * + * The example below shows code to create the array of region references. The references are created in an + * array of type #hdset_reg_ref_t. Each region is defined as a selection on the dataspace of the dataset, + * and a reference is created using \ref H5Rcreate(). The call to \ref H5Rcreate() specifies the file, + * dataset, and the dataspace with selection. + * + * Create an array of region references + * \code + * // create an array of 4 region references + * hdset_reg_ref_t ref[4]; + * + * // Create a reference to the first hyperslab in the first Dataset. + * offset[0] = 1; offset[1] = 1; + * count[0] = 3; count[1] = 2; + * status = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, offset, NULL, count, NULL); + * status = H5Rcreate(&ref[0], file_id, "D1", H5R_DATASET_REGION, space_id); + * + * // The second reference is to a union of hyperslabs in the first Dataset + * offset[0] = 5; offset[1] = 3; + * count[0] = 1; count[1] = 4; + * status = H5Sselect_none(space_id); + * status = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, offset, NULL, count, NULL); + * offset[0] = 6; offset[1] = 5; + * count[0] = 1; count[1] = 2; + * status = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, offset, NULL, count, NULL); + * status = H5Rcreate(&ref[1], file_id, "D1", H5R_DATASET_REGION, space_id); + * + * // the fourth reference is to a selection of points in the first Dataset + * status = H5Sselect_none(space_id); + * coord[0][0] = 4; coord[0][1] = 4; + * coord[1][0] = 2; coord[1][1] = 6; + * coord[2][0] = 3; coord[2][1] = 7; + * coord[3][0] = 1; coord[3][1] = 5; + * coord[4][0] = 5; coord[4][1] = 8; + * + * status = H5Sselect_elements(space_id, H5S_SELECT_SET, num_points, (const hssize_t **)coord); + * status = H5Rcreate(&ref[3], file_id, "D1", H5R_DATASET_REGION, space_id); + * + * // the third reference is to a hyperslab in the second Dataset + * offset[0] = 0; offset[1] = 0; + * count[0] = 4; count[1] = 6; + * status = H5Sselect_hyperslab(space_id2, H5S_SELECT_SET, offset, NULL, count, NULL); + * status = H5Rcreate(&ref[2], file_id, "D2", H5R_DATASET_REGION, space_id2); + * \endcode + * + * When all the references are created, the array of references is written to the dataset R1. The + * dataset is declared to have datatype #H5T_STD_REF_DSETREG. See the example below. + * + * Write the array of references to a dataset + * \code + * Hsize_t dimsr[1]; + * dimsr[0] = 4; + * + * // Dataset with references. + * spacer_id = H5Screate_simple(1, dimsr, NULL); + * dsetr_id = H5Dcreate(file_id, "R1", H5T_STD_REF_DSETREG, spacer_id, H5P_DEFAULT, H5P_DEFAULT, + * H5P_DEFAULT); + * + * // Write dataset with the references. + * status = H5Dwrite(dsetr_id, H5T_STD_REF_DSETREG, H5S_ALL, H5S_ALL, H5P_DEFAULT, ref); + * + * \endcode + * + * When creating region references, the following rules are enforced. + * \li The selection must be a valid selection for the target dataset, just as when transferring data + * \li The dataset must exist in the file when the reference is created; #H5Rcreate + * \li The target dataset must be in the same file as the stored reference + * + * \subsubsection subsubsec_dataspace_refer_read Reading References to Regions + * + * To retrieve data from a region reference, the reference must be read from the file, and then the data can + * be retrieved. The steps are: + * \li 1. Open the dataset or attribute containing the reference objects + * \li 2. 
Read the reference object(s) + * \li 3. For each region reference, get the dataset (#H5Rdereference) and dataspace (#H5Rget_region) + * \li 4. Use the dataspace and datatype to discover what space is needed to store the data, allocate the + * correct storage and create a dataspace and datatype to define the memory data layout + * + * The example below shows code to read an array of region references from a dataset, and then read the + * data from the first selected region. Note that the region reference has information that records the + * dataset (within the file) and the selection on the dataspace of the dataset. After dereferencing the + * regions reference, the datatype, number of points, and some aspects of the selection can be discovered. + * (For a union of hyperslabs, it may not be possible to determine the exact set of hyperslabs that has been + * combined.) + * The table below the code example shows the inquiry functions. + * + * When reading data from a region reference, the following rules are enforced: + * \li The target dataset must be present and accessible in the file + * \li The selection must be a valid selection for the dataset + * + * Read an array of region references; read from the first selection + * \code + * dsetr_id = H5Dopen (file_id, "R1", H5P_DEFAULT); + * status = H5Dread(dsetr_id, H5T_STD_REF_DSETREG, H5S_ALL, H5S_ALL, H5P_DEFAULT, ref_out); + * + * // Dereference the first reference. + * // 1) get the dataset (H5Rdereference) + * // 2) get the selected dataspace (H5Rget_region) + * + * dsetv_id = H5Rdereference(dsetr_id, H5R_DATASET_REGION, &ref_out[0]); + * space_id = H5Rget_region(dsetr_id, H5R_DATASET_REGION, &ref_out[0]); + * + * // Discover how many points and shape of the data + * ndims = H5Sget_simple_extent_ndims(space_id); + * H5Sget_simple_extent_dims(space_id,dimsx,NULL); + * + * // Read and display hyperslab selection from the dataset. + * dimsy[0] = H5Sget_select_npoints(space_id); + * spacex_id = H5Screate_simple(1, dimsy, NULL); + * + * status = H5Dread(dsetv_id, H5T_NATIVE_INT, H5S_ALL, space_id, H5P_DEFAULT, data_out); + * printf("Selected hyperslab: "); + * for (i = 0; i < 8; i++) { + * printf("\n"); + * for (j = 0; j < 10; j++) + * printf("%d ", data_out[i][j]); + * } + * printf("\n"); + * \endcode + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
+ * <table>
+ * <caption>The inquiry functions</caption>
+ * <tr>
+ * <th>Function</th>
+ * <th>Information</th>
+ * </tr>
+ * <tr>
+ * <td>@ref H5Sget_select_npoints</td>
+ * <td>The number of elements in the selection (hyperslab or point selection).</td>
+ * </tr>
+ * <tr>
+ * <td>@ref H5Sget_select_bounds</td>
+ * <td>The bounding box that encloses the selected points (hyperslab or point selection).</td>
+ * </tr>
+ * <tr>
+ * <td>@ref H5Sget_select_hyper_nblocks</td>
+ * <td>The number of blocks in the selection.</td>
+ * </tr>
+ * <tr>
+ * <td>@ref H5Sget_select_hyper_blocklist</td>
+ * <td>A list of the blocks in the selection.</td>
+ * </tr>
+ * <tr>
+ * <td>@ref H5Sget_select_elem_npoints</td>
+ * <td>The number of points in the selection.</td>
+ * </tr>
+ * <tr>
+ * <td>@ref H5Sget_select_elem_pointlist</td>
+ * <td>The points.</td>
+ * </tr>
+ * </table>
+ *
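+ * For illustration, a few of these inquiry calls applied to the selected dataspace obtained above
+ * (\Emph{space_id}) might look like the sketch below; the rank-2 selection and the variable names are
+ * assumptions carried over from the earlier example.
+ *
+ * Inspect a selection with the inquiry functions
+ * \code
+ * hssize_t npoints;
+ * hsize_t start[2], end[2];
+ * herr_t status;
+ *
+ * // Number of selected elements and the bounding box of the selection
+ * npoints = H5Sget_select_npoints(space_id);
+ * status = H5Sget_select_bounds(space_id, start, end);
+ * printf("selection has %lld points, bounding box (%llu,%llu)-(%llu,%llu)\n",
+ *        (long long)npoints,
+ *        (unsigned long long)start[0], (unsigned long long)start[1],
+ *        (unsigned long long)end[0], (unsigned long long)end[1]);
+ *
+ * // For hyperslab selections, the individual blocks can also be counted
+ * if (H5Sget_select_type(space_id) == H5S_SEL_HYPERSLABS) {
+ *     hssize_t nblocks = H5Sget_select_hyper_nblocks(space_id);
+ *     printf("selection consists of %lld blocks\n", (long long)nblocks);
+ * }
+ * \endcode
+ *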
+ * + * + * \subsection subsec_dataspace_sample Sample Programs + * + * This section contains the full programs from which several of the code examples in this chapter were + * derived. The h5dump output from the program’s output file immediately follows each program. + * + * h5_write.c + * \code + * #include "hdf5.h" + * + * #define H5FILE_NAME "SDS.h5" + * #define DATASETNAME "C Matrix" + * #define NX 3 + * #define NY 5 + * #define RANK 2 // dataset dimensions + * + * int + * main (void) + * { + * hid_t file, dataset; // file and dataset identifiers + * hid_t datatype, dataspace; // identifiers + * hsize_t dims[2]; // dataset dimensions + * herr_t status; + * int data[NX][NY]; // data to write + * int i, j; + * + * // + * // Data and output buffer initialization. + * for (j = 0; j < NX; j++) { + * for (i = 0; i < NY; i++) + * data[j][i] = i + 1 + j*NY; + * } + * // 1 2 3 4 5 + * // 6 7 8 9 10 + * // 11 12 13 14 15 + * + * // Create a new file using H5F_ACC_TRUNC access, + * // default file creation properties, and default file + * // access properties. + * file = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); + * + * // Describe the size of the array and create the data space for fixed + * // size dataset. + * dims[0] = NX; + * dims[1] = NY; + * dataspace = H5Screate_simple(RANK, dims, NULL); + * + * // Create a new dataset within the file using defined dataspace and + * // datatype and default dataset creation properties. + * dataset = H5Dcreate(file, DATASETNAME, H5T_NATIVE_INT, dataspace, H5P_DEFAULT, + * H5P_DEFAULT, H5P_DEFAULT); + * + * // Write the data to the dataset using default transfer properties. + * status = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data); + * + * // Close/release resources. + * H5Sclose(dataspace); + * H5Dclose(dataset); + * H5Fclose(file); + * + * return 0; + * } + * + * SDS.out + * ------- + * HDF5 "SDS.h5" { + * GROUP "/" { + * DATASET "C Matrix" { + * DATATYPE H5T_STD_I32BE + * DATASPACE SIMPLE { ( 3, 5 ) / ( 3, 5 ) } + * DATA { + * 1, 2, 3, 4, 5, + * 6, 7, 8, 9, 10, + * 11, 12, 13, 14, 15 + * } + * } + * + * \endcode + * + * h5_write.f90 + * \code + * ---------- + * PROGRAM DSETEXAMPLE + * + * USE HDF5 ! This module contains all necessary modules + * + * IMPLICIT NONE + * + * CHARACTER(LEN=7), PARAMETER :: filename = "SDSf.h5" ! File name + * CHARACTER(LEN=14), PARAMETER :: dsetname = "Fortran Matrix" ! Dataset name + * INTEGER, PARAMETER :: NX = 3 + * INTEGER, PARAMETER :: NY = 5 + * + * INTEGER(HID_T) :: file_id ! File identifier + * INTEGER(HID_T) :: dset_id ! Dataset identifier + * INTEGER(HID_T) :: dspace_id ! Dataspace identifier + * + * INTEGER(HSIZE_T), DIMENSION(2) :: dims = (/3,5/) ! Dataset dimensions + * INTEGER :: rank = 2 ! Dataset rank + * INTEGER :: data(NX,NY) + * INTEGER :: error ! Error flag + * INTEGER :: i, j + * + * ! + * ! Initialize data + * ! + * do i = 1, NX + * do j = 1, NY + * data(i,j) = j + (i-1)*NY + * enddo + * enddo + * ! + * ! Data + * ! + * ! 1 2 3 4 5 + * ! 6 7 8 9 10 + * ! 11 12 13 14 15 + * + * ! + * ! Initialize FORTRAN interface. + * ! + * CALLh5open_f(error) + * + * ! + * ! Create a new file using default properties. + * ! + * CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error) + * + * ! + * ! Create the dataspace. + * ! + * CALL h5screate_simple_f(rank, dims, dspace_id, error) + * + * ! + * ! Create and write dataset using default properties. + * ! 
+ * CALL h5dcreate_f(file_id, dsetname, H5T_NATIVE_INTEGER, dspace_id, & + * dset_id, error, H5P_DEFAULT_F, H5P_DEFAULT_F, & + * H5P_DEFAULT_F) + * + * CALL h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, data, dims, error) + * + * ! + * ! End access to the dataset and release resources used by it. + * ! + * CALL h5dclose_f(dset_id, error) + * + * ! + * ! Terminate access to the data space. + * ! + * CALL h5sclose_f(dspace_id, error) + * + * ! + * ! Close the file. + * ! + * CALL h5fclose_f(file_id, error) + * + * ! + * ! Close FORTRAN interface. + * ! + * CALL h5close_f(error) + * + * END PROGRAM DSETEXAMPLE + * + * SDSf.out + * -------- + * HDF5 "SDSf.h5" { + * GROUP "/" { + * DATASET "Fortran Matrix" { + * DATATYPE H5T_STD_I32BE + * DATASPACE SIMPLE { ( 5, 3 ) / ( 5, 3 ) } + * DATA { + * 1, 6, 11, + * 2, 7, 12, + * 3, 8, 13, + * 4, 9, 14, + * 5, 10, 15 + * } + * } + * } + * } + * + * \endcode + * + * h5_write_tr.f90 + * \code + * PROGRAM DSETEXAMPLE + * + * USE HDF5 ! This module contains all necessary modules + * + * IMPLICIT NONE + * + * CHARACTER(LEN=10), PARAMETER :: filename = "SDSf_tr.h5" ! File name + * CHARACTER(LEN=24), PARAMETER :: dsetname = "Fortran Transpose Matrix"! Dataset name + * + * INTEGER, PARAMETER :: NX = 3 + * INTEGER, PARAMETER :: NY = 5 + * + * INTEGER(HID_T) :: file_id ! File identifier + * INTEGER(HID_T) :: dset_id ! Dataset identifier + * INTEGER(HID_T) :: dspace_id ! Dataspace identifier + * + * INTEGER(HSIZE_T), DIMENSION(2) :: dims = (/NY, NX/) ! Dataset dimensions + * INTEGER :: rank = 2 ! Dataset rank + * INTEGER :: data(NY,NX) + * + * INTEGER :: error ! Error flag + * INTEGER :: i, j + * + * ! + * ! Initialize data + * ! + * do i = 1, NY + * do j = 1, NX + * data(i,j) = i + (j-1)*NY + * enddo + * enddo + * + * ! + * ! Data + * ! + * ! 1 6 11 + * ! 2 7 12 + * ! 3 8 13 + * ! 4 9 14 + * ! 5 10 15 + * + * ! + * ! Initialize FORTRAN interface. + * ! + * CALL h5open_f(error) + * + * ! + * ! Create a new file using default properties. + * ! + * CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error) + * + * ! + * ! Create the dataspace. + * ! + * CALL h5screate_simple_f(rank, dims, dspace_id, error) + * + * ! + * ! Create and write dataset using default properties. + * ! + * CALL h5dcreate_f(file_id, dsetname, H5T_NATIVE_INTEGER, dspace_id, & + * dset_id, error, H5P_DEFAULT_F, H5P_DEFAULT_F, & + * H5P_DEFAULT_F) + * CALL h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, data, dims, error) + * + * ! + * ! End access to the dataset and release resources used by it. + * ! + * CALL h5dclose_f(dset_id, error) + * + * ! + * ! Terminate access to the data space. + * ! + * CALL h5sclose_f(dspace_id, error) + * + * ! + * ! Close the file. + * ! + * CALL h5fclose_f(file_id, error) + * + * ! + * ! Close FORTRAN interface. + * ! + * CALL h5close_f(error) + * + * END PROGRAM DSETEXAMPLE + * + * SDSf_tr.out + * ----------- + * HDF5 "SDSf_tr.h5" { + * GROUP "/" { + * DATASET "Fortran Transpose Matrix" { + * DATATYPE H5T_STD_I32LE + * DATASPACE SIMPLE { ( 3, 5 ) / ( 3, 5 ) } + * DATA { + * 1, 2, 3, 4, 5, + * 6, 7, 8, 9, 10, + * 11, 12, 13, 14, 15 + * } + * } + * } + * } + * + * \endcode + * + * Previous Chapter \ref sec_datatype - Next Chapter \ref sec_attribute + * + */ + +/** + * \defgroup H5S Dataspaces (H5S) * * Use the functions in this module to manage HDF5 dataspaces \Emph{and} selections. * @@ -41,6 +1528,7 @@ * using \Emph{selections}. Furthermore, certain set operations are supported * for selections. 
* + * */ #endif /* H5Smodule_H */ diff --git a/src/H5Tmodule.h b/src/H5Tmodule.h index 30ac702..83f7467 100644 --- a/src/H5Tmodule.h +++ b/src/H5Tmodule.h @@ -29,7 +29,3837 @@ #define H5_MY_PKG_ERR H5E_DATATYPE #define H5_MY_PKG_INIT YES -/**\defgroup H5T H5T +/** \page H5T_UG HDF5 Datatypes + * + * \section sec_datatype HDF5 Datatypes + * HDF5 datatypes describe the element type of HDF5 datasets and attributes. + * There's a large set of predefined datatypes, but users may find it useful + * to define new datatypes through a process called \Emph{derivation}. + * + * The element type is automatically persisted as part of the HDF5 metadata of + * attributes and datasets. Additionally, datatype definitions can be persisted + * to HDF5 files and linked to groups as HDF5 datatype objects or so-called + * \Emph{committed datatypes}. + * + * \subsection subsec_datatype_intro Introduction and Definitions + * + * An HDF5 dataset is an array of data elements, arranged according to the specifications + * of the dataspace. In general, a data element is the smallest addressable unit of storage + * in the HDF5 file. (Compound datatypes are the exception to this rule.) The HDF5 datatype + * defines the storage format for a single data element. See the figure below. + * + * The model for HDF5 attributes is extremely similar to datasets: an attribute has a dataspace + * and a data type, as shown in the figure below. The information in this chapter applies to both + * datasets and attributes. + * + * + * + * + * + *
+ * \image html Dtypes_fig1.gif "Datatypes, dataspaces, and datasets" + *
+ * + * Abstractly, each data element within the dataset is a sequence of bits, interpreted as a single + * value from a set of values (for example, a number or a character). For a given datatype, there is a + * standard or convention for representing the values as bits, and when the bits are represented in a + * particular storage the bits are laid out in a specific storage scheme such as 8-bit bytes with a + * specific ordering and alignment of bytes within the storage array. + * + * HDF5 datatypes implement a flexible, extensible, and portable mechanism for specifying and + * discovering the storage layout of the data elements, determining how to interpret the elements + * (for example, as floating point numbers), and for transferring data from different compatible + * layouts. + * + * An HDF5 datatype describes one specific layout of bits. A dataset has a single datatype which + * applies to every data element. When a dataset is created, the storage datatype is defined. After + * the dataset or attribute is created, the datatype cannot be changed. + * \li The datatype describes the storage layout of a singledata element + * \li All elements of the dataset must have the same type + * \li The datatype of a dataset is immutable + * + * When data is transferred (for example, a read or write), each end point of the transfer has a + * datatype, which describes the correct storage for the elements. The source and destination may + * have different (but compatible) layouts, in which case the data elements are automatically + * transformed during the transfer. + * + * HDF5 datatypes describe commonly used binary formats for numbers (integers + * and floating point) and characters (ASCII). A given computing architecture and programming language + * supports certain number and character representations. For example, a computer may support 8-, + * 16-, 32-, and 64-bit signed integers, stored in memory in little-endian byte order. These would + * presumably correspond to the C programming language types \Emph{char}, \Emph{short}, + * \Emph{int}, and \Emph{long}. + * + * When reading and writing from memory, the HDF5 library must know the appropriate datatype + * that describes the architecture specific layout. The HDF5 library provides the platform + * independent \Emph{NATIVE} types, which are mapped to an appropriate datatype for each platform. + * So the type #H5T_NATIVE_INT is an alias for the appropriate descriptor for each platform. + * + * Data in memory has a datatype: + * \li The storage layout in memory is architecture-specific + * \li The HDF5 \Emph{NATIVE} types are predefined aliases for the architecture-specific memory layout + * \li The memory datatype need not be the same as the stored datatype of the dataset + * + * In addition to numbers and characters, an HDF5 datatype can describe more abstract classes of + * types including enumerations, strings, bit strings, and references (pointers to objects in the HDF5 + * file). HDF5 supports several classes of composite datatypes which are combinations of one or + * more other datatypes. In addition to the standard predefined datatypes, users can define new + * datatypes within the datatype classes. 
+ * + * The HDF5 datatype model is very general and flexible: + * \li For common simple purposes, only predefined types will be needed + * \li Datatypes can be combined to create complex structured datatypes + * \li If needed, users can define custom atomic datatypes + * \li Committed datatypes can be shared by datasets or attributes + * + * \subsection subsec_datatype_model Datatype Model + * The HDF5 library implements an object-oriented model of datatypes. HDF5 datatypes are + * organized as a logical set of base types, or datatype classes. Each datatype class defines + * a format for representing logical values as a sequence of bits. For example the #H5T_INTEGER + * class is a format for representing twos complement integers of various sizes. + * + * A datatype class is defined as a set of one or more datatype properties. A datatype property is + * a property of the bit string. The datatype properties are defined by the logical model of the + * datatype class. For example, the integer class (twos complement integers) has properties such as + * “signed or unsigned”, “length”, and “byte-order”. The float class (IEEE floating point numbers) + * has these properties, plus “exponent bits”, “exponent sign”, etc. + * + * A datatype is derived from one datatype class: a given datatype has a specific value for the + * datatype properties defined by the class. For example, for 32-bit signed integers, stored + * big-endian, the HDF5 datatype is a sub-type of integer with the properties set to + * signed=1, size=4(bytes), and byte-order=BE. + * + * The HDF5 datatype API (H5T functions) provides methods to create datatypes of different + * datatype classes, to set the datatype properties of a new datatype, and to discover the datatype + * properties of an existing datatype. + * + * The datatype for a dataset is stored in the HDF5 file as part of the metadata for the dataset. + * A datatype can be shared by more than one dataset in the file if the datatype is saved to the + * file with a name. This shareable datatype is known as a committed datatype. In the past, + * this kind of datatype was called a named datatype. + * + * When transferring data (for example, a read or write), the data elements of the source and + * destination storage must have compatible types. As a general rule, data elements with the same + * datatype class are compatible while elements from different datatype classes are not compatible. + * When transferring data of one datatype to another compatible datatype, the HDF5 Library uses + * the datatype properties of the source and destination to automatically transform each data + * element. For example, when reading from data stored as 32-bit signed integers, big + * endian into 32-bit signed integers, little-endian, the HDF5 Library will automatically swap the + * bytes. + * + * Thus, data transfer operations (\ref H5Dread, \ref H5Dwrite, \ref H5Aread, \ref H5Awrite) require + * a datatype for both the source and the destination. + * + * + * + * + * + *
+ * \image html Dtypes_fig2.gif "The datatype model" + *
+ * + * The HDF5 library defines a set of predefined datatypes, corresponding to commonly used + * storage formats, such as twos complement integers, IEEE Floating point numbers, etc., 4- + * and 8-byte sizes, big-endian and little-endian byte orders. In addition, a user can derive types with + * custom values for the properties. For example, a user program may create a datatype to describe + * a 6-bit integer, or a 600-bit floating point number. + * + * In addition to atomic datatypes, the HDF5 library supports composite datatypes. A composite + * datatype is an aggregation of one or more datatypes. Each class of composite datatypes has + * properties that describe the organization of the composite datatype. See the figure below. + * Composite datatypes include: + * \li Compound datatypes: structured records + * \li Array: a multidimensional array of a datatype + * \li Variable-length: a one-dimensional array of a datatype + * + * + * + * + * + *
+ * \image html Dtypes_fig3.gif "Composite datatypes" + *
+ * + * \subsubsection subsubsec_datatype_model_class Datatype Classes and Properties + * The figure below shows the HDF5 datatype classes. Each class is defined to have a set of + * properties which describe the layout of the data element and the interpretation of the bits. The + * table below lists the properties for the datatype classes. + * + * + * + * + * + *
+ * \image html Dtypes_fig4.gif "Datatype classes" + *
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Datatype classes and their properties
+ * Class + * + * Description + * + * Properties + * + * Notes + *
+ * Integer + * + * Twos complement integers + * + * Size (bytes), precision (bits), offset (bits), pad, byte order, signed/unsigned + * + *
+ * Float + * + * Floating Point numbers + * + * Size (bytes), precision (bits), offset (bits), pad, byte order, sign position, + * exponent position, exponent size (bits), exponent sign, exponent bias, mantissa position, + * mantissa (size) bits, mantissa sign, mantissa normalization, internal padding + * + * See IEEE 754 for a definition of these properties. These properties describe + * non-IEEE 754 floating point formats as well. + *
+ * Character + * + * Array of 1-byte character encoding + * + * Size (characters), Character set, byte order, pad/no pad, pad character + * + * Currently, ASCII and UTF-8 are supported. + *
+ * Bitfield + * + * String of bits + * + * Size (bytes), precision (bits), offset (bits), pad, byte order + * + * A sequence of bit values packed into one or more bytes. + *
+ * Opaque + * + * Uninterpreted data + * + * Size (bytes), precision (bits), offset (bits), pad, byte order, tag + * + * A sequence of bytes, stored and retrieved as a block. + * The ‘tag’ is a string that can be used to label the value. + *
+ * Enumeration + * + * A list of discrete values, with symbolic names in the form of strings. + * + * Number of elements, element names, element values + * + * Enumeration is a list of pairs (name, value). The name is a string; the + * value is an unsigned integer. + *
+ * Reference + * + * Reference to object or region within the HDF5 file + * + * + * + * @see H5R + *
+ * Array + * + * Array (1-4 dimensions) of data elements + * + * Number of dimensions, dimension sizes, base datatype + * + * The array is accessed atomically: no selection or sub-setting. + *
+ * Variable-length + * + * A variable-length 1-dimensional array of data elements + * + * Current size, base type + * + * + *
+ * Compound + * + * A Datatype of a sequence of Datatypes + * + * Number of members, member names, member types, member offset, member class, + * member size, byte order + * + * + *
+ * + * \subsubsection subsubsec_datatype_model_predefine Predefined Datatypes + * The HDF5 library predefines a modest number of commonly used datatypes. These types have + * standard symbolic names of the form H5T_arch_base where arch is an architecture name and + * base is a programming type name Table 2. New types can be derived from the predefined + * types by copying the predefined type \ref H5Tcopy() and then modifying the result. + * + * The base name of most types consists of a letter to indicate the class Table 3, a precision in + * bits, and an indication of the byte order Table 4. + * + * Table 5 shows examples of predefined datatypes. The full list can be found in the + * \ref PDT section of the \ref RM. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 2. Architectures used in predefined datatypes
+ * Architecture Name + * + * Description + *
+ * IEEE + * + * IEEE-754 standard floating point types in various byte orders. + *
+ * STD + * + * This is an architecture that contains semi-standard datatypes like signed + * two’s complement integers, unsigned integers, and bitfields in various + * byte orders. + *
+ * C, FORTRAN
+ * Types which are specific to the C or Fortran programming languages + * are defined in these architectures. For instance, #H5T_C_S1 defines a + * base string type with null termination which can be used to derive string + * types of other lengths. + *
+ * NATIVE + * + * This architecture contains C-like datatypes for the machine on which + * the library was compiled. The types were actually defined by running + * the H5detect program when the library was compiled. In order to be + * portable, applications should almost always use this architecture + * to describe things in memory. + *
+ * CRAY + * + * Cray architectures. These are word-addressable, big-endian systems + * with non-IEEE floating point. + *
+ * INTEL + * + * All Intel and compatible CPU’s. + * These are little-endian systems with IEEE floating-point. + *
+ * MIPS + * + * All MIPS CPU’s commonly used in SGI systems. These are big-endian + * systems with IEEE floating-point. + *
+ * ALPHA + * + * All DEC Alpha CPU’s, little-endian systems with IEEE floating-point. + *
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 3. Base types
+ * Base + * + * Description + *
+ * B + * + * Bitfield + *
+ * F + * + * Floating point + *
+ * I + * + * Signed integer + *
+ * R + * + * References + *
+ * S + * + * Character string + *
+ * U + * + * Unsigned integer + *
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 4. Byte order
+ * Order + * + * Description + *
+ * BE + * + * Big-endian + *
+ * LE + * + * Little-endian + *
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 5. Some predefined datatypes
+ * Example + * + * Description + *
+ * #H5T_IEEE_F64LE + * + * Eight-byte, little-endian, IEEE floating-point + *
+ * #H5T_IEEE_F32BE + * + * Four-byte, big-endian, IEEE floating point + *
+ * #H5T_STD_I32LE + * + * Four-byte, little-endian, signed two’s complement integer + *
+ * #H5T_STD_U16BE + * + * Two-byte, big-endian, unsigned integer + *
+ * #H5T_C_S1 + * One-byte, null-terminated string of eight-bit characters + *
+ * #H5T_INTEL_B64 + * + * Eight-byte bit field on an Intel CPU + *
+ * #H5T_STD_REF_OBJ + * + * Reference to an entire object in a file + *
+ * + * The HDF5 library predefines a set of \Emph{NATIVE} datatypes which are similar to C type names. + * The native types are set to be an alias for the appropriate HDF5 datatype for each platform. For + * example, #H5T_NATIVE_INT corresponds to a C int type. On an Intel based PC, this type is the same as + * #H5T_STD_I32LE, while on a MIPS system this would be equivalent to #H5T_STD_I32BE. Table 6 shows + * examples of \Emph{NATIVE} types and corresponding C types for a common 32-bit workstation. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 6. Native and 32-bit C datatypes
+ * Example + * + * Corresponding C Type + *
+ * #H5T_NATIVE_CHAR + * + * char + *
+ * #H5T_NATIVE_SCHAR + * + * signed char + *
+ * #H5T_NATIVE_UCHAR + * + * unsigned char + *
+ * #H5T_NATIVE_SHORT + * + * short + *
+ * #H5T_NATIVE_USHORT + * + * unsigned short + *
+ * #H5T_NATIVE_INT + * + * int + *
+ * #H5T_NATIVE_UINT + * + * unsigned + *
+ * #H5T_NATIVE_LONG + * + * long + *
+ * #H5T_NATIVE_ULONG + * + * unsigned long + *
+ * #H5T_NATIVE_LLONG + * + * long long + *
+ * #H5T_NATIVE_ULLONG + * + * unsigned long long + *
+ * #H5T_NATIVE_FLOAT + * + * float + *
+ * #H5T_NATIVE_DOUBLE + * + * double + *
+ * #H5T_NATIVE_LDOUBLE + * + * long double + *
+ * #H5T_NATIVE_HSIZE + * + * hsize_t + *
+ * #H5T_NATIVE_HSSIZE + * + * hssize_t + *
+ * #H5T_NATIVE_HERR + * + * herr_t + *
+ * #H5T_NATIVE_HBOOL + * + * hbool_t + *
+ * #H5T_NATIVE_B8 + * + * 8-bit unsigned integer or 8-bit buffer in memory + *
+ * #H5T_NATIVE_B16 + * + * 16-bit unsigned integer or 16-bit buffer in memory + *
+ * #H5T_NATIVE_B32 + * + * 32-bit unsigned integer or 32-bit buffer in memory + *
+ * #H5T_NATIVE_B64 + * + * 64-bit unsigned integer or 64-bit buffer in memory + *
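+ * When the file datatype of an existing dataset is not known in advance, an appropriate \Emph{NATIVE}
+ * memory type can be derived from it with #H5Tget_native_type. The short sketch below assumes an
+ * already-open dataset identifier \Emph{dataset_id}; the identifier name is hypothetical.
+ *
+ * Deriving a native memory datatype from a file datatype
+ * \code
+ * hid_t file_type, mem_type;
+ *
+ * // File datatype of the dataset and its native in-memory equivalent
+ * file_type = H5Dget_type(dataset_id);
+ * mem_type  = H5Tget_native_type(file_type, H5T_DIR_ASCEND);
+ *
+ * // mem_type can now be passed as the memory datatype to H5Dread or H5Dwrite
+ *
+ * H5Tclose(mem_type);
+ * H5Tclose(file_type);
+ * \endcode
+ *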
+ * + * \subsection subsec_datatype_usage How Datatypes are Used + * + * \subsubsection subsubsec_datatype_usage_object The Datatype Object and the HDF5 Datatype API + * The HDF5 library manages datatypes as objects. The HDF5 datatype API manipulates the + * datatype objects through C function calls. New datatypes can be created from scratch or + * copied from existing datatypes. When a datatype is no longer needed its resources should be released by + * calling \ref H5Tclose(). + * + * The datatype object is used in several roles in the HDF5 data model and library. Essentially, a + * datatype is used whenever the form at of data elements is needed. There are four major uses of + * datatypes in the HDF5 library: at dataset creation, during data transfers, when discovering the + * contents of a file, and for specifying user-defined datatypes. See the table below. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 7. Datatype uses
+ * Use + * + * Description + *
+ * Dataset creation + * + * The datatype of the data elements must be declared when the dataset is created. + *
+ * Dataset transfer + * + * The datatype (format) of the data elements must be defined for both the source and destination. + *
+ * Discovery + * + * The datatype of a dataset can be interrogated to retrieve a complete description of the storage layout. + *
+ * Creating user-defined datatypes + * + * Users can define their own datatypes by creating datatype objects and setting their properties. + *
+ * + * \subsubsection subsubsec_datatype_usage_create Dataset Creation + * All the data elements of a dataset have the same datatype. When a dataset is created, the datatype + * for the data elements must be specified. The datatype of a dataset can never be changed. The + * example below shows the use of a datatype to create a dataset called “/dset”. In this example, the + * dataset will be stored as 32-bit signed integers in big-endian order. + * + * Using a datatype to create a dataset + * \code + * hid_t dt; + * + * dt = H5Tcopy(H5T_STD_I32BE); + * dataset_id = H5Dcreate(file_id, “/dset”, dt, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + * \endcode + * + * \subsubsection subsubsec_datatype_usage_transfer Data Transfer (Read and Write) + * Probably the most common use of datatypes is to write or read data from a dataset or attribute. In + * these operations, each data element is transferred from the source to the destination (possibly + * rearranging the order of the elements). Since the source and destination do not need to be + * identical (in other words, one is disk and the other is memory), the transfer requires + * both the format of the source element and the destination element. Therefore, data transfers use two + * datatype objects, for the source and destination. + * + * When data is written, the source is memory and the destination is disk (file). The memory + * datatype describes the format of the data element in the machine memory, and the file datatype + * describes the desired format of the data element on disk. Similarly, when reading, the source + * datatype describes the format of the data element on disk, and the destination datatype describes + * the format in memory. + * + * In the most common cases, the file datatype is the datatype specified when + * the dataset was + * created, and the memory datatype should be the appropriate \Emph{NATIVE} type. + * The examples below show samples of writing data to and reading data from a dataset. The data + * in memory is declared C type ‘int’, and the datatype #H5T_NATIVE_INT corresponds to this + * type. The datatype of the dataset should be of datatype class #H5T_INTEGER. + * + * Writing to a dataset + * \code + * int dset_data[DATA_SIZE]; + * + * status = H5Dwrite(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset_data); + * \endcode + * + * Reading from a dataset + * \code + * int dset_data[DATA_SIZE]; + * + * status = H5Dread(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset_data); + * \endcode + * + * \subsubsection subsubsec_datatype_usage_discover Discovery of Data Format + * The HDF5 Library enables a program to + * determine the datatype class and properties for any + * datatype. In order to discover the storage format of data in a dataset, the datatype is obtained, and + * the properties are determined by queries to the datatype object. The example below shows code + * that analyzes the datatype for an integer and prints out a description of its storage properties + * (byte order, signed, size). 
+ * + * Discovering datatype properties + * \code + * switch (H5Tget_class(type)) { + * case H5T_INTEGER: + * ord = H5Tget_order(type); + * sgn = H5Tget_sign(type); + * printf(“Integer ByteOrder= ”); + * switch (ord) { + * case H5T_ORDER_LE: + * printf(“LE”); + * break; + * case H5T_ORDER_BE: + * printf(“BE”); + * break; + * } + * printf(“ Sign= ”); + * switch (sgn) { + * case H5T_SGN_NONE: + * printf(“false”); + * break; + * case H5T_SGN_2: + * printf(“true”); + * break; + * } + * printf(“ Size= ”); + * sz = H5Tget_size(type); + * printf(“%d”, sz); + * printf(“\n”); + * break; + * case H5T_???? + * ... + * break; + * } + * \endcode + * + * \subsubsection subsubsec_datatype_usage_user Creating and Using User‐defined Datatypes + * Most programs will primarily use the predefined datatypes described above, possibly in + * composite data types such as compound or array datatypes. However, the HDF5 datatype model + * is extremely general; a user program can define a great variety of atomic datatypes (storage + * layouts). In particular, the datatype properties can define signed and unsigned integers of any + * size and byte order, and floating point numbers with different formats, size, and byte order. The + * HDF5 datatype API provides methods to set these properties. + * + * User-defined types can be used to define the layout of data in memory; examples might match + * some platform specific number format or application defined bit-field. The user-defined type can + * also describe data in the file such as an application-defined format. The user-defined types can be + * translated to and from standard types of the same class, as described above. + * + * \subsection subsec_datatype_function Datatype Function Summaries + * @see H5T reference manual provides a reference list of datatype functions, the H5T APIs. + * + * \subsection subsec_datatype_program Programming Model for Datatypes + * The HDF5 Library implements an object-oriented model of datatypes. HDF5 datatypes are + * organized as a logical set of base types, or datatype classes. The HDF5 Library manages + * datatypes as objects. The HDF5 datatype API manipulates the datatype objects through C + * function calls. The figure below shows the abstract view of the datatype object. The table below + * shows the methods (C functions) that operate on datatype objects. New datatypes can be created + * from scratch or copied from existing datatypes. + * + * + * + * + * + *
+ * \image html Dtypes_fig5.gif "The datatype object" + *
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 8. General operations on datatype objects
+ * API Function + * + * Description + *
+ * \ref hid_t \ref H5Tcreate (\ref H5T_class_t class, size_t size) + * + * Create a new datatype object of the specified datatype class. The following datatype classes are supported + * with this function: + * \li #H5T_COMPOUND + * \li #H5T_OPAQUE + * \li #H5T_ENUM + * \li Other datatypes are created with \ref H5Tcopy(). + *
+ * \ref hid_t \ref H5Tcopy (\ref hid_t type) + * + * Obtain a modifiable transient datatype which is a copy of type. If type is a dataset identifier + * then the type returned is a modifiable transient copy of the datatype of the specified dataset. + *
+ * \ref hid_t \ref H5Topen (\ref hid_t location, const char *name, #H5P_DEFAULT) + * + * Open a committed datatype. The committed datatype returned by this function is read-only. + *
+ * \ref htri_t \ref H5Tequal (\ref hid_t type1, \ref hid_t type2) + * + * Determines if two types are equal. + *
+ * \ref herr_t \ref H5Tclose (\ref hid_t type) + * + * Releases resources associated with a datatype obtained from \ref H5Tcopy, \ref H5Topen, or + * \ref H5Tcreate. It is illegal to close an immutable transient datatype (for example, predefined types). + *
+ * \ref herr_t \ref H5Tcommit (\ref hid_t location, const char *name, hid_t type, + * #H5P_DEFAULT, #H5P_DEFAULT, #H5P_DEFAULT) + * + * Commit a transient datatype (not immutable) to a file to become a committed datatype. Committed + * datatypes can be shared. + *
+ * \ref htri_t \ref H5Tcommitted (\ref hid_t type) + * + * Test whether the datatype is transient or committed (named). + *
+ * \ref herr_t \ref H5Tlock (\ref hid_t type) + * + * Make a transient datatype immutable (read-only and not closable). Predefined types are locked. + *
+ * + * In order to use a datatype, the object must be created (\ref H5Tcreate), or a reference obtained by + * cloning from an existing type (\ref H5Tcopy), or opened (\ref H5Topen). In addition, a reference to the + * datatype of a dataset or attribute can be obtained with \ref H5Dget_type or \ref H5Aget_type. For + * composite datatypes a reference to the datatype for members or base types can be obtained + * (\ref H5Tget_member_type, \ref H5Tget_super). When the datatype object is no longer needed, the + * reference is discarded with \ref H5Tclose. + * + * Two datatype objects can be tested to see if they are the same with \ref H5Tequal. This function + * returns true if the two datatype references refer to the same datatype object. However, if two + * datatype objects define equivalent datatypes (the same datatype class and datatype properties), + * they will not be considered ‘equal’. + * + * A datatype can be written to the file as a first class object (\ref H5Tcommit). This is a committed + * datatype and can be used in thesame way as any other datatype. + * + * \subsubsection subsubsec_datatype_program_discover Discovery of Datatype Properties + * Any HDF5 datatype object can be queried to discover all of its datatype properties. For each + * datatype class, there are a set of API functions to retrieve the datatype properties for this class. + * + *

+ * <h4>Properties of Atomic Datatypes</h4>

+ * Table 9 lists the functions to discover the properties of atomic datatypes. Table 10 lists the + * queries relevant to specific numeric types. Table 11 gives the properties for atomic string + * datatype, and Table 12 gives the property of the opaque datatype. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 9. Functions to discover properties of atomic datatypes
+ * API Function + * + * Description + *
+ * \ref H5T_class_t \ref H5Tget_class (\ref hid_t type) + * + * The datatype class: #H5T_INTEGER, #H5T_FLOAT, #H5T_STRING, #H5T_BITFIELD, #H5T_OPAQUE, #H5T_COMPOUND, + * #H5T_REFERENCE, #H5T_ENUM, #H5T_VLEN, #H5T_ARRAY + *
+ * size_t \ref H5Tget_size (\ref hid_t type) + * + * The total size of the element in bytes, including padding which may appear on either side of the + * actual value. + *
+ * \ref H5T_order_t \ref H5Tget_order (\ref hid_t type) + * + * The byte order describes how the bytes of the datatype are laid out in memory. If the lowest memory + * address contains the least significant byte of the datum then it is said to be little-endian or + * #H5T_ORDER_LE. If the bytes are in the opposite order then they are said to be big-endian or #H5T_ORDER_BE. + *
+ * size_t \ref H5Tget_precision (\ref hid_t type) + * + * The precision property identifies the number of significant bits of a datatype and the offset property + * (defined below) identifies its location. Some datatypes occupy more bytes than what is needed to store + * the value. For instance, a short on a Cray is 32 significant bits in an eight-byte field. + *
+ * int \ref H5Tget_offset (\ref hid_t type) + * + * The offset property defines the bit location of the least significant bit of a bit field whose length + * is precision. + *
+ * \ref herr_t \ref H5Tget_pad (\ref hid_t type, \ref H5T_pad_t *lsb, \ref H5T_pad_t *msb) + * + * Padding is the bits of a data element which are not significant as defined by the precision and offset + * properties. Padding in the low-numbered bits is lsb padding and padding in the high-numbered bits is msb + * padding. Padding bits can be set to zero (#H5T_PAD_ZERO) or one (#H5T_PAD_ONE). + *
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 10. Functions to discover properties of atomic datatypes
+ * API Function + * + * Description + *
+ * \ref H5T_sign_t \ref H5Tget_sign (\ref hid_t type) + * + * (INTEGER)Integer data can be signed two’s complement (#H5T_SGN_2) or unsigned (#H5T_SGN_NONE). + *
+ * \ref herr_t \ref H5Tget_fields (\ref hid_t type, size_t *spos, size_t *epos, size_t *esize, + * size_t*mpos, size_t *msize) + * + * (FLOAT)A floating-point data element has bit fields which are the exponent and mantissa as well as a + * mantissa sign bit. These properties define the location (bit position of least significant bit of the + * field) and size (in bits) of each field. The sign bit is always of length one and none of the fields + * are allowed to overlap. + *
+ * size_t \ref H5Tget_ebias (\ref hid_t type) + * + * (FLOAT)The exponent is stored as a non-negative value which is ebias larger than the true exponent. + *
+ * \ref H5T_norm_t \ref H5Tget_norm (\ref hid_t type)
+ *
+ * (FLOAT)This property describes the normalization method of the mantissa.
+ * \li #H5T_NORM_MSBSET: the mantissa is shifted left (if non-zero) until the first bit
+ * after the radix point is set and the exponent is adjusted accordingly. All bits of the
+ * mantissa after the radix point are stored.
+ * \li #H5T_NORM_IMPLIED: the mantissa is shifted left (if non-zero) until the first
+ * bit after the radix point is set and the exponent is adjusted accordingly. The first
+ * bit after the radix point is not stored since it is always set.
+ * \li #H5T_NORM_NONE: the fractional part of the mantissa is stored without normalizing it.
+ *
+ * \ref H5T_pad_t \ref H5Tget_inpad (\ref hid_t type) + * + * (FLOAT)If any internal bits (that is, bits between the sign bit, the mantissa field, + * and the exponent field but within the precision field) are unused, then they will be + * filled according to the value of this property. The padding can be: + * #H5T_PAD_BACKGROUND, #H5T_PAD_ZERO,or #H5T_PAD_ONE. + *
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 11. Functions to discover properties of atomic string datatypes
+ * API Function + * + * Description + *
+ * \ref H5T_cset_t \ref H5Tget_cset (\ref hid_t type) + * + * Two character sets are currently supported: + * ASCII (#H5T_CSET_ASCII) and UTF-8 (#H5T_CSET_UTF8). + *
+ * \ref H5T_str_t \ref H5Tget_strpad (\ref hid_t type) + * + * The string datatype has a fixed length, but the string may be shorter than the length. + * This property defines the storage mechanism for the left over bytes. The options are: + * \li #H5T_STR_NULLTERM + * \li #H5T_STR_NULLPAD + * \li #H5T_STR_SPACEPAD. + *
+ * + * + * + * + * + * + * + * + * + * + * + *
Table 12. Functions to discover properties of atomic opaque datatypes
+ * API Function + * + * Description + *
+ * char* \ref H5Tget_tag(\ref hid_t type_id) + * + * A user-defined string. + *
+ * + *
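+ * For illustration, the string-related queries from the tables above might be used as in the sketch
+ * below. The identifier \Emph{str_type} is a hypothetical, already-open atomic string datatype, for
+ * example one returned by \ref H5Dget_type.
+ *
+ * Inspecting an atomic string datatype
+ * \code
+ * hid_t str_type;   // assumed: an existing atomic string datatype
+ * size_t len;
+ * H5T_cset_t cset;
+ * H5T_str_t pad;
+ *
+ * // Length in bytes, character set, and padding mode of the string type
+ * len  = H5Tget_size(str_type);
+ * cset = H5Tget_cset(str_type);
+ * pad  = H5Tget_strpad(str_type);
+ * printf("string length %lu, %s, padding mode %d\n", (unsigned long)len,
+ *        cset == H5T_CSET_UTF8 ? "UTF-8" : "ASCII", (int)pad);
+ * \endcode
+ *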

+ * <h4>Properties of Composite Datatypes</h4>

+ * The composite datatype classes can also be analyzed to discover their datatype properties and the + * datatypes that are members or base types of the composite datatype. The member or base type + * can, in turn, be analyzed. The table below lists the functions that can access the datatype + * properties of the different composite datatypes. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 13. Functions to discover properties of composite datatypes
+ * API Function + * + * Description + *
+ * int \ref H5Tget_nmembers(\ref hid_t type_id) + * + * (COMPOUND)The number of fields in the compound datatype. + *
+ * \ref H5T_class_t \ref H5Tget_member_class (\ref hid_t cdtype_id, unsigned member_no) + * + * (COMPOUND)The datatype class of compound datatype member member_no. + *
+ * char* \ref H5Tget_member_name (\ref hid_t type_id, unsigned field_idx) + * + * (COMPOUND)The name of field field_idx of a compound datatype. + *
+ * size_t \ref H5Tget_member_offset (\ref hid_t type_id, unsigned memb_no) + * + * (COMPOUND)The byte offset of the beginning of a field within a compound datatype. + *
+ * \ref hid_t \ref H5Tget_member_type (\ref hid_t type_id, unsigned field_idx) + * + * (COMPOUND)The datatype of the specified member. + *
+ * int \ref H5Tget_array_ndims (\ref hid_t adtype_id) + * + * (ARRAY)The number of dimensions (rank) of the array datatype object. + *
+ * int \ref H5Tget_array_dims (\ref hid_t adtype_id, hsize_t *dims[]) + * + * (ARRAY)The sizes of the dimensions and the dimension permutations of the array datatype object. + *
+ * \ref hid_t \ref H5Tget_super(\ref hid_t type) + * + * (ARRAY, VL, ENUM)The base datatype from which the datatype type is derived. + *
+ * \ref herr_t \ref H5Tenum_nameof(\ref hid_t type, const void *value, char *name, size_t size) + * + * (ENUM)The symbol name that corresponds to the specified value of the enumeration datatype. + *
+ * \ref herr_t \ref H5Tenum_valueof(\ref hid_t type, const char *name, void *value) + * + * (ENUM)The value that corresponds to the specified name of the enumeration datatype. + *
+ * \ref herr_t \ref H5Tget_member_value (\ref hid_t type, unsigned memb_no, void *value) + * + * (ENUM)The value of the enumeration datatype member memb_no. + *
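+ * As a short illustration of the compound-datatype queries in the table above, the sketch below walks
+ * the members of a compound datatype. The dataset identifier \Emph{dataset_id} is hypothetical, and its
+ * datatype is assumed to be of class #H5T_COMPOUND.
+ *
+ * Discovering the members of a compound datatype
+ * \code
+ * int i, nmembers;
+ * hid_t dt, member_type;
+ * char *member_name;
+ *
+ * dt = H5Dget_type(dataset_id); // assumed to be a compound datatype
+ * nmembers = H5Tget_nmembers(dt);
+ * for (i = 0; i < nmembers; i++) {
+ *     member_name = H5Tget_member_name(dt, (unsigned)i);
+ *     member_type = H5Tget_member_type(dt, (unsigned)i);
+ *     printf("member %d: %s, offset %lu, class %d\n", i, member_name,
+ *            (unsigned long)H5Tget_member_offset(dt, (unsigned)i),
+ *            (int)H5Tget_member_class(dt, (unsigned)i));
+ *     H5free_memory(member_name);
+ *     H5Tclose(member_type);
+ * }
+ * H5Tclose(dt);
+ * \endcode
+ *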
+ *
+ * \subsubsection subsubsec_datatype_program_define Definition of Datatypes
+ * The HDF5 library enables user programs to create and modify datatypes. The essential steps are:
+ * \li 1. Create a new datatype object of a specific composite datatype class, or copy an existing
+ * atomic datatype object
+ * \li 2. Set properties of the datatype object
+ * \li 3. Use the datatype object
+ * \li 4. Close the datatype object
+ * + * To create a user-defined atomic datatype, the procedure is to clone a predefined datatype of the + * appropriate datatype class (\ref H5Tcopy), and then set the datatype properties appropriate to the + * datatype class. The table below shows how to create a datatype to describe a 1024-bit unsigned + * integer. + * + * Create a new datatype + * \code + * hid_t new_type = H5Tcopy (H5T_NATIVE_INT); + * + * H5Tset_precision(new_type, 1024); + * H5Tset_sign(new_type, H5T_SGN_NONE); + * \endcode + * + * Composite datatypes are created with a specific API call for each datatype class. The table below + * shows the creation method for each datatype class. A newly created datatype cannot be used until the + * datatype properties are set. For example, a newly created compound datatype has no members and cannot + * be used. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 14. Functions to create each datatype class
+ * Datatype Class + * + * Function to Create + *
+ * COMPOUND + * + * #H5Tcreate + *
+ * OPAQUE + * + * #H5Tcreate + *
+ * ENUM + * + * #H5Tenum_create + *
+ * ARRAY + * + * #H5Tarray_create + *
+ * VL + * + * #H5Tvlen_create + *
+ * + * Once the datatype is created and the datatype properties set, the datatype object can be used. + * + * Predefined datatypes are defined by the library during initialization using the same mechanisms + * as described here. Each predefined datatype is locked (\ref H5Tlock), so that it cannot be changed or + * destroyed. User-defined datatypes may also be locked using \ref H5Tlock. + * + *
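+ * For the compound case from the table above, a newly created datatype is populated with \ref H5Tinsert
+ * before it is used. The struct and member names in the sketch below are hypothetical.
+ *
+ * Creating and populating a compound datatype
+ * \code
+ * typedef struct {
+ *     double time;
+ *     int    count;
+ * } sample_t;
+ *
+ * hid_t ctype;
+ * herr_t status;
+ *
+ * // A compound datatype is empty when created; members must be inserted before use
+ * ctype  = H5Tcreate(H5T_COMPOUND, sizeof(sample_t));
+ * status = H5Tinsert(ctype, "time",  HOFFSET(sample_t, time),  H5T_NATIVE_DOUBLE);
+ * status = H5Tinsert(ctype, "count", HOFFSET(sample_t, count), H5T_NATIVE_INT);
+ *
+ * // ctype can now be used, for example, as the datatype of a new dataset or attribute
+ * H5Tclose(ctype);
+ * \endcode
+ *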

+ * <h4>User-defined Atomic Datatypes</h4>

+ * Table 15 summarizes the API methods that set properties of atomic types. Table 16 shows + * properties specific to numeric types, Table 17 shows properties specific to the string datatype + * class. Note that offset, pad, etc. do not apply to strings. Table 18 shows the specific property of + * the OPAQUE datatype class. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 15. API methods that set properties of atomic datatypes
+ * Functions + * + * Description + *
+ * \ref herr_t \ref H5Tset_size (\ref hid_t type, size_t size) + * + * Set the total size of the element in bytes. This includes padding which may appear on either + * side of the actual value. If this property is reset to a smaller value which would cause the + * significant part of the data to extend beyond the edge of the datatype, then the offset property + * is decremented a bit at a time. If the offset reaches zero and the significant part of the data + * still extends beyond the edge of the datatype then the precision property is decremented a bit at + * a time. Decreasing the size of a datatype may fail if the #H5T_FLOAT bit fields would extend beyond + * the significant part of the type. + *
+ * \ref herr_t \ref H5Tset_order (\ref hid_t type, \ref H5T_order_t order) + * + * Set the byte order to little-endian (#H5T_ORDER_LE) or big-endian (#H5T_ORDER_BE). + *
+ * \ref herr_t \ref H5Tset_precision (\ref hid_t type, size_t precision) + * + * Set the number of significant bits of a datatype. The offset property (defined below) identifies + * its location. The size property defined above represents the entire size (in bytes) of the datatype. + * If the precision is decreased then padding bits are inserted on the MSB side of the significant + * bits (this will fail for #H5T_FLOAT types if it results in the sign,mantissa, or exponent bit field + * extending beyond the edge of the significant bit field). On the other hand, if the precision is + * increased so that it “hangs over” the edge of the total size then the offset property is decremented + * a bit at a time. If the offset reaches zero and the significant bits still hang over the edge, then + * the total size is increased a byte at a time. + *
+ * \ref herr_t \ref H5Tset_offset (\ref hid_t type, size_t offset) + * + * Set the bit location of the least significant bit of a bit field whose length is precision. The + * bits of the entire data are numbered beginning at zero at the least significant bit of the least + * significant byte (the byte at the lowest memory address for a little-endian type or the byte at + * the highest address for a big-endian type). The offset property defines the bit location of the + * least significant bit of a bit field whose length is precision. If the offset is increased so the + * significant bits “hang over” the edge of the datum, then the size property is automatically incremented. + *
+ * \ref herr_t \ref H5Tset_pad (\ref hid_t type, \ref H5T_pad_t lsb, \ref H5T_pad_t msb) + * + * Set the padding to zeros (#H5T_PAD_ZERO) or ones (#H5T_PAD_ONE). Padding is the bits of a + * data element which are not significant as defined by the precision and offset properties. Padding + * in the low-numbered bits is lsb padding and padding in the high-numbered bits is msb padding. + *
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 16. API methods that set properties of numeric datatypes
+ * Functions + * + * Description + *
+ * \ref herr_t \ref H5Tset_sign (\ref hid_t type, \ref H5T_sign_t sign) + * + * (INTEGER)Integer data can be signed two’s complement (#H5T_SGN_2) or unsigned (#H5T_SGN_NONE). + *
+ * \ref herr_t \ref H5Tset_fields (\ref hid_t type, size_t spos, size_t epos, size_t esize, + * size_t mpos, size_t msize) + * + * (FLOAT)Set the properties define the location (bit position of least significant bit of the field) + * and size (in bits) of each field. The sign bit is always of length one and none of the fields are + * allowed to overlap. + *
+ * \ref herr_t \ref H5Tset_ebias (\ref hid_t type, size_t ebias) + * + * (FLOAT)The exponent is stored as a non-negative value which is ebias larger than the true exponent. + *
+ * \ref herr_t \ref H5Tset_norm (\ref hid_t type, \ref H5T_norm_t norm) + * + * (FLOAT)This property describes the normalization method of the mantissa. + *
+ * \li #H5T_NORM_MSBSET: the mantissa is shifted left (if non-zero) until the first bit
+ * after the radix point is set and the exponent is adjusted accordingly. All bits of the
+ * mantissa after the radix point are stored.
+ * \li #H5T_NORM_IMPLIED: the mantissa is shifted left (if non-zero) until the first bit
+ * after the radix point is set and the exponent is adjusted accordingly. The first bit after
+ * the radix point is not stored since it is always set.
+ * \li #H5T_NORM_NONE: the fractional part of the mantissa is stored without normalizing it.
+ *
+ * \ref herr_t \ref H5Tset_inpad (\ref hid_t type, \ref H5T_pad_t inpad)
+ *
+ * (FLOAT) If any internal bits (that is, bits between the sign bit, the mantissa field,
+ * and the exponent field but within the precision field) are unused, then they will be
+ * filled according to the value of this property. The padding can be:
+ * \li #H5T_PAD_BACKGROUND
+ * \li #H5T_PAD_ZERO
+ * \li #H5T_PAD_ONE
+ *
+ * A short sketch combining these floating-point properties follows the table.
+ *
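+ *
+ * The following is a minimal, non-authoritative sketch of how the floating-point properties above
+ * might be combined on a user-defined 32-bit float; the field positions, exponent bias, and padding
+ * choices are illustrative assumptions rather than a required layout.
+ * \code
+ * hid_t ft = H5Tcopy(H5T_IEEE_F32LE);    // start from a predefined 32-bit float
+ * H5Tset_fields(ft, 31, 23, 8, 0, 23);   // sign at bit 31, 8-bit exponent at bit 23, 23-bit mantissa at bit 0
+ * H5Tset_ebias(ft, 127);                 // exponent stored as true exponent + 127
+ * H5Tset_norm(ft, H5T_NORM_IMPLIED);     // leading mantissa bit is implied, not stored
+ * H5Tset_inpad(ft, H5T_PAD_ZERO);        // any internal unused bits are filled with zeros
+ * \endcode
+ *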
Table 17. API methods that set properties of string datatypes
+ * Functions + * + * Description + *
+ * \ref herr_t \ref H5Tset_size (\ref hid_t type, size_t size) + * + * Set the length of the string, in bytes. The precision is automatically set to 8*size. + *
+ * \ref herr_t \ref H5Tset_precision (\ref hid_t type, size_t precision) + * + * The precision must be a multiple of 8. + *
+ * \ref herr_t \ref H5Tset_cset (\ref hid_t type_id, \ref H5T_cset_t cset) + * + * Two character sets are currently supported: + * \li ASCII (#H5T_CSET_ASCII) + * \li UTF-8 (#H5T_CSET_UTF8). + *
+ * \ref herr_t \ref H5Tset_strpad (\ref hid_t type_id, H5T_str_t strpad)
+ *
+ * The string datatype has a fixed length, but the string may be shorter than the length. This
+ * property defines the storage mechanism for the leftover bytes. The method used to store
+ * character strings differs with the programming language:
+ * \li C usually null terminates strings
+ * \li Fortran left-justifies and space-pads strings
+ *
+ * Valid string padding values, as passed in the parameter strpad, are as follows:
+ * \li #H5T_STR_NULLTERM: Null terminate (as C does)
+ * \li #H5T_STR_NULLPAD: Pad with zeros
+ * \li #H5T_STR_SPACEPAD: Pad with spaces (as FORTRAN does)
+ *
+ * A sketch combining these string properties follows the table.
+ *
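+ *
+ * The sketch below is a non-authoritative illustration of the string properties in Table 17; the
+ * 20-byte length, character set, and padding choices are assumptions made only for the example.
+ * \code
+ * hid_t strtype = H5Tcopy(H5T_C_S1);          // start from the predefined C string type
+ * H5Tset_size(strtype, 20);                   // fixed length of 20 bytes
+ * H5Tset_cset(strtype, H5T_CSET_UTF8);        // elements hold UTF-8 characters
+ * H5Tset_strpad(strtype, H5T_STR_NULLTERM);   // shorter strings are null terminated
+ * \endcode
+ *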
Table 18. API methods that set properties of opaque datatypes
+ * Functions + * + * Description + *
+ * \ref herr_t \ref H5Tset_tag (\ref hid_t type_id, const char *tag)
+ *
+ * Tags the opaque datatype type_id with an ASCII identifier tag (see the sketch following this table).
+ *
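+ *
+ * As a hedged illustration of #H5Tset_tag, the sketch below creates an opaque datatype for
+ * 100-byte uninterpreted elements; the element size and the tag string are assumptions made only
+ * for the example.
+ * \code
+ * hid_t blob = H5Tcreate(H5T_OPAQUE, 100);     // each element is 100 uninterpreted bytes
+ * H5Tset_tag(blob, "encrypted certificate");   // label the bytes for consuming applications
+ * \endcode
+ *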

Examples

+ * The example below shows how to create a 128-bit little-endian signed integer type. Increasing + * the precision of a type automatically increases the total size. Note that the proper + * procedure is to begin from a type of the intended datatype class which in this case is a + * NATIVE INT. + * + * Create a new 128-bit little-endian signed integer datatype + * \code + * hid_t new_type = H5Tcopy (H5T_NATIVE_INT); + * H5Tset_precision (new_type, 128); + * H5Tset_order (new_type, H5T_ORDER_LE); + * \endcode + * + * The figure below shows the storage layout as the type is defined. The \ref H5Tcopy creates a + * datatype that is the same as #H5T_NATIVE_INT. In this example, suppose this is a 32-bit + * big-endian number (Figure a). The precision is set to 128 bits, which automatically extends + * the size to 8 bytes (Figure b). Finally, the byte order is set to little-endian (Figure c). + * + * + * + * + * + *
+ * \image html Dtypes_fig6.gif "The storage layout for a new 128-bit little-endian signed integer datatype" + *
+ * + * The significant bits of a data element can be offset from the beginning of the memory for that + * element by an amount of padding. The offset property specifies the number of bits of padding + * that appear to the “right of” the value. The table and figure below show how a 32-bit unsigned + * integer with 16-bits of precision having the value 0x1122 will be laid out in memory. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Table 19. Memory Layout for a 32-bit unsigned integer
+ * <table>
+ * <tr>
+ * <th>Byte Position</th>
+ * <th>Big-Endian Offset=0</th>
+ * <th>Big-Endian Offset=16</th>
+ * <th>Little-Endian Offset=0</th>
+ * <th>Little-Endian Offset=16</th>
+ * </tr>
+ * <tr><td>0:</td><td>[pad]</td><td>[0x11]</td><td>[0x22]</td><td>[pad]</td></tr>
+ * <tr><td>1:</td><td>[pad]</td><td>[0x22]</td><td>[0x11]</td><td>[pad]</td></tr>
+ * <tr><td>2:</td><td>[0x11]</td><td>[pad]</td><td>[pad]</td><td>[0x22]</td></tr>
+ * <tr><td>3:</td><td>[0x22]</td><td>[pad]</td><td>[pad]</td><td>[0x11]</td></tr>
+ * </table>
+ * \image html Dtypes_fig7.gif "Memory Layout for a 32-bit unsigned integer" + *
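+ *
+ * A minimal sketch of one column of Table 19, assuming a little-endian layout: a 32-bit unsigned
+ * integer restricted to 16 significant bits placed 16 bits into the datum. The starting type and the
+ * offset value are illustrative only.
+ * \code
+ * hid_t ut = H5Tcopy(H5T_STD_U32LE);   // 32-bit little-endian unsigned integer
+ * H5Tset_precision(ut, 16);            // only 16 bits are significant
+ * H5Tset_offset(ut, 16);               // significant bits start at bit 16; bits 0-15 are padding
+ * \endcode
+ *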
+ *
+ * If the offset is incremented then the total size is also incremented if necessary to prevent
+ * significant bits of the value from hanging over the edge of the datatype.
+ *
+ * The bits of the entire data are numbered beginning at zero at the least significant bit of the least
+ * significant byte (the byte at the lowest memory address for a little-endian type or the byte at the
+ * highest address for a big-endian type). The offset property defines the bit location of the least
+ * significant bit of a bit field whose length is precision. If the offset is increased so the significant
+ * bits “hang over” the edge of the datum, then the size property is automatically incremented.
+ *
+ * To illustrate the properties of the integer datatype class, the example below shows how to create
+ * a user-defined datatype that describes a 24-bit signed integer that starts on the third bit of a 32-bit
+ * word. The datatype is specialized from a 32-bit integer, the precision is set to 24 bits, and the
+ * offset is set to 3.
+ *
+ * A user-defined datatype with a 24-bit signed integer
+ * \code
+ * hid_t dt;
+ *
+ * dt = H5Tcopy(H5T_STD_I32LE);
+ * H5Tset_precision(dt, 24);
+ * H5Tset_offset(dt, 3);
+ * H5Tset_pad(dt, H5T_PAD_ZERO, H5T_PAD_ONE);
+ * \endcode
+ *
+ * The figure below shows the storage layout for a data element. Note that the unused bits in the
+ * offset will be set to zero and the unused bits at the end will be set to one, as specified in the
+ * \ref H5Tset_pad call.
+ *
+ * \image html Dtypes_fig8.gif "A user-defined integer datatype with a range of -1,048,583 to 1,048,584" + *
+ *
+ * To illustrate a user-defined floating point number, the example below shows how to create a 24-bit
+ * floating point number that starts 5 bits into a 4 byte word. The floating point number is defined to
+ * have a mantissa of 19 bits (bits 5-23), an exponent of 3 bits (25-27), and the sign bit is bit 28.
+ * (Note that this is an illustration of what can be done and is not necessarily a floating point
+ * format that a user would require.)
+ *
+ * A user-defined 24-bit floating point datatype
+ * \code
+ * hid_t dt;
+ *
+ * dt = H5Tcopy(H5T_IEEE_F32LE);
+ * H5Tset_precision(dt, 24);
+ * H5Tset_fields (dt, 28, 25, 3, 5, 19);
+ * H5Tset_pad(dt, H5T_PAD_ZERO, H5T_PAD_ONE);
+ * H5Tset_inpad(dt, H5T_PAD_ZERO);
+ * \endcode
+ *
+ * \image html Dtypes_fig9.gif "A user-defined floating point datatype" + *
+ * The figure above shows the storage layout of a data element for this datatype. Note that there is + * an unused bit (24) between the mantissa and the exponent. This bit is filled with the inpad value + * which in this case is 0. + * + * The sign bit is always of length one and none of the fields are allowed to overlap. When + * expanding a floating-point type one should set the precision first; when decreasing the size one + * should set the field positions and sizes first. + * + *
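+ *
+ * A minimal sketch of that ordering, assuming a fresh 32-bit float being widened to an
+ * IEEE-double-like layout purely for illustration; the positions and bias are example values only.
+ * \code
+ * hid_t ft2 = H5Tcopy(H5T_IEEE_F32LE);     // illustrative starting point
+ * H5Tset_precision(ft2, 64);               // widen the significant region first (the size grows to 8 bytes)
+ * H5Tset_fields(ft2, 63, 52, 11, 0, 52);   // then place sign (bit 63), 11-bit exponent, 52-bit mantissa
+ * H5Tset_ebias(ft2, 1023);                 // adjust the exponent bias for the wider exponent
+ * \endcode
+ *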

Composite Datatypes

+ * All composite datatypes must be user-defined; there are no predefined composite datatypes. + * + *

Compound Datatypes

+ * The subsections below describe how to create a compound datatype and how to write and read + * data of a compound datatype. + * + *

Defining Compound Datatypes

+ *
+ * Compound datatypes are conceptually similar to C structs or Fortran derived types. The
+ * compound datatype defines a contiguous sequence of bytes, which are formatted using from one up to
+ * 2^16 datatypes (members). A compound datatype may have any number of members, in any
+ * order, and the members may have any datatype, including compound. Thus, complex nested
+ * compound datatypes can be created. The total size of the compound datatype is greater than or
+ * equal to the sum of the sizes of its members, up to a maximum of 2^32 bytes. HDF5 does not
+ * support datatypes with distinguished records or the equivalent of C unions or Fortran
+ * EQUIVALENCE statements.
+ *
+ * Usually a C struct or Fortran derived type will be defined to hold a data point in memory, and the
+ * offsets of the members in memory will be the offsets of the struct members from the beginning
+ * of an instance of the struct. The HDF5 C library provides a macro #HOFFSET(s,m) to calculate
+ * the member’s offset. HDF5 Fortran applications have to calculate offsets by using the sizes of the
+ * member datatypes and by taking into consideration the order of members in the Fortran derived type.
+ * \code
+ * HOFFSET(s,m)
+ * \endcode
+ * This macro computes the offset of member m within a struct s
+ * \code
+ * offsetof(s,m)
+ * \endcode
+ * This macro defined in stddef.h does exactly the same thing as the #HOFFSET() macro.
+ *
+ * Note for Fortran users: Offsets of Fortran structure members correspond to the offsets within a
+ * packed datatype (see explanation below) stored in an HDF5 file.
+ *
+ * Each member of a compound datatype must have a descriptive name which is the key used to
+ * uniquely identify the member within the compound datatype. A member name in an HDF5
+ * datatype does not necessarily have to be the same as the name of the member in the C struct or
+ * Fortran derived type, although this is often the case. Nor does one need to define all members of
+ * the C struct or Fortran derived type in the HDF5 compound datatype (or vice versa).
+ *
+ * Unlike atomic datatypes which are derived from other atomic datatypes, compound datatypes are
+ * created from scratch. First, one creates an empty compound datatype and specifies its total size.
+ * Then members are added to the compound datatype in any order. Each member type is inserted
+ * at a designated offset. Each member has a name which is the key used to uniquely identify the
+ * member within the compound datatype.
+ *
+ * The example below shows a way of creating an HDF5 C compound datatype to describe a
+ * complex number. This is a structure with two components, “real” and “imaginary”, and each
+ * component is a double. An equivalent C struct whose type is defined by the complex_t struct is
+ * shown.
+ *
+ * A compound datatype for complex numbers in C
+ * \code
+ * typedef struct {
+ *    double re; // real part
+ *    double im; // imaginary part
+ * } complex_t;
+ *
+ * hid_t complex_id = H5Tcreate (H5T_COMPOUND, sizeof (complex_t));
+ * H5Tinsert (complex_id, "real", HOFFSET(complex_t,re),
+ *            H5T_NATIVE_DOUBLE);
+ * H5Tinsert (complex_id, "imaginary", HOFFSET(complex_t,im),
+ *            H5T_NATIVE_DOUBLE);
+ * \endcode
+ *
+ * The example below shows a way of creating an HDF5 Fortran compound datatype to describe a
+ * complex number. This is a Fortran derived type with two components, “real” and “imaginary”,
+ * and each component is DOUBLE PRECISION. An equivalent Fortran TYPE whose type is defined
+ * by the TYPE complex_t is shown.
+ * + * A compound datatype for complex numbers in Fortran + * \code + * TYPE complex_t + * DOUBLE PRECISION re ! real part + * DOUBLE PRECISION im; ! imaginary part + * END TYPE complex_t + * + * CALL h5tget_size_f(H5T_NATIVE_DOUBLE, re_size, error) + * CALL h5tget_size_f(H5T_NATIVE_DOUBLE, im_size, error) + * complex_t_size = re_size + im_size + * CALL h5tcreate_f(H5T_COMPOUND_F, complex_t_size, type_id) + * offset = 0 + * CALL h5tinsert_f(type_id, “real”, offset, H5T_NATIVE_DOUBLE, error) + * offset = offset + re_size + * CALL h5tinsert_f(type_id, “imaginary”, offset, H5T_NATIVE_DOUBLE, error) + * \endcode + * + * Important Note: The compound datatype is created with a size sufficient to hold all its members. + * In the C example above, the size of the C struct and the #HOFFSET macro are used as a + * convenient mechanism to determine the appropriate size and offset. Alternatively, the size and + * offset could be manually determined: the size can be set to 16 with “real” at offset 0 and + * “imaginary” at offset 8. However, different platforms and compilers have different sizes for + * “double” and may have alignment restrictions which require additional padding within the + * structure. It is much more portable to use the #HOFFSET macro which assures that the values will + * be correct for any platform. + * + * The figure below shows how the compound datatype would be laid out assuming that + * NATIVE_DOUBLE are 64-bit numbers and that there are no alignment requirements. The total + * size of the compound datatype will be 16 bytes, the “real” component will start at byte 0, and + * “imaginary” will start at byte 8. + * + * + * + * + * + *
+ * \image html Dtypes_fig10.gif "Layout of a compound datatype" + *
+ * + * The members of a compound datatype may be any HDF5 datatype including the compound, + * array, and variable-length (VL) types. The figure and example below show the memory layout + * and code which creates a compound datatype composed of two complex values, and each + * complex value is also a compound datatype as in the figure above. + * + * + * + * + * + *
+ * \image html Dtypes_fig11.gif "Layout of a compound datatype nested in a compound datatype" + *
+ * + * Code for a compound datatype nested in a compound datatype + * \code + * typedef struct { + * complex_t x; + * complex_t y; + * } surf_t; + * + * hid_t complex_id, surf_id; // hdf5 datatypes + * + * complex_id = H5Tcreate (H5T_COMPOUND, sizeof(complex_t)); + * H5Tinsert (complex_id, “re”, HOFFSET(complex_t, re), H5T_NATIVE_DOUBLE); + * H5Tinsert (complex_id, “im”, HOFFSET(complex_t, im), H5T_NATIVE_DOUBLE); + * + * surf_id = H5Tcreate (H5T_COMPOUND, sizeof(surf_t)); + * H5Tinsert (surf_id, “x”, HOFFSET(surf_t, x), complex_id); + * H5Tinsert (surf_id, “y”, HOFFSET(surf_t, y), complex_id); + * \endcode + * + * Note that a similar result could be accomplished by creating a compound datatype and inserting + * four fields. See the figure below. This results in the same layout as the figure above. The difference + * would be how the fields are addressed. In the first case, the real part of ‘y’ is called ‘y.re’; + * in the second case it is ‘y-re’. + * + * Another compound datatype nested in a compound datatype + * \code + * typedef struct { + * complex_t x; + * complex_t y; + * } surf_t; + * + * hid_t surf_id = H5Tcreate (H5T_COMPOUND, sizeof(surf_t)); + * H5Tinsert (surf_id, “x-re”, HOFFSET(surf_t, x.re), H5T_NATIVE_DOUBLE); + * H5Tinsert (surf_id, “x-im”, HOFFSET(surf_t, x.im), H5T_NATIVE_DOUBLE); + * H5Tinsert (surf_id, “y-re”, HOFFSET(surf_t, y.re), H5T_NATIVE_DOUBLE); + * H5Tinsert (surf_id, “y-im”, HOFFSET(surf_t, y.im), H5T_NATIVE_DOUBLE); + * \endcode + * + * The members of a compound datatype do not always fill all the bytes. The #HOFFSET macro + * assures that the members will be laid out according to the requirements of the platform and + * language. The example below shows an example of a C struct which requires extra bytes of + * padding on many platforms. The second element, ‘b’, is a 1-byte character followed by an 8 byte + * double, ‘c’. On many systems, the 8-byte value must be stored on a 4-or 8-byte boundary. This + * requires the struct to be larger than the sum of the size of its elements. + * + * In the example below, sizeof and #HOFFSET are used to assure that the members are inserted at + * the correct offset to match the memory conventions of the platform. The figure below shows how + * this data element would be stored in memory, assuming the double must start on a 4-byte + * boundary. Notice the extra bytes between ‘b’ and ‘c’. + * + * A compound datatype that requires padding + * \code + * typedef struct { + * int a; + * char b; + * double c; + * } s1_t; + * + * hid_t s1_tid = H5Tcreate (H5T_COMPOUND, sizeof(s1_t)); + * H5Tinsert (s1_tid, “x-im”, HOFFSET(s1_t, a), H5T_NATIVE_INT); + * H5Tinsert (s1_tid, “y-re”, HOFFSET(s1_t, b), H5T_NATIVE_CHAR); + * H5Tinsert (s1_tid, “y-im”, HOFFSET(s1_t, c), H5T_NATIVE_DOUBLE); + * \endcode + * + * + * + * + * + *
+ * \image html Dtypes_fig12.gif "Memory layout of a compound datatype that requires padding" + *
+ * + * However, data stored on disk does not require alignment, so unaligned versions of compound + * data structures can be created to improve space efficiency on disk. These unaligned compound + * datatypes can be created by computing offsets by hand to eliminate inter-member padding, or the + * members can be packed by calling #H5Tpack (which modifies a datatype directly, so it is usually + * preceded by a call to #H5Tcopy). + * + * The example below shows how to create a disk version of the compound datatype from the + * figure above in order to store data on disk in as compact a form as possible. Packed compound + * datatypes should generally not be used to describe memory as they may violate alignment + * constraints for the architecture being used. Note also that using a packed datatype for disk + * storage may involve a higher data conversion cost. + * + * Create a packed compound datatype in C + * \code + * hid_t s2_tid = H5Tcopy (s1_tid); + * H5Tpack (s2_tid); + * \endcode + * + * The example below shows the sequence of Fortran calls to create a packed compound datatype. + * An HDF5 Fortran compound datatype never describes a compound datatype in memory and + * compound data is ALWAYS written by fields as described in the next section. Therefore packing + * is not needed unless the offset of each consecutive member is not equal to the sum of the sizes of + * the previous members. + * + * Create a packed compound datatype in Fortran + * \code + * CALL h5tcopy_f(s1_id, s2_id, error) + * CALL h5tpack_f(s2_id, error) + * \endcode + * + *

Creating and Writing Datasets with Compound Datatypes

+ * + * Creating datasets with compound datatypes is similar to creating datasets with any other HDF5 + * datatypes. But writing and reading may be different since datasets that have compound datatypes + * can be written or read by a field (member) or subsets of fields (members). The compound + * datatype is the only composite datatype that supports “sub-setting” by the elements the datatype + * is built from. + * + * The example below shows a C example of creating and writing a dataset with a compound + * datatype. + * + * + * Create and write a dataset with a compound datatype in C + * \code + * typedef struct s1_t { + * int a; + * float b; + * double c; + * } s1_t; + * + * s1_t data[LENGTH]; + * + * // Initialize data + * for (i = 0; i < LENGTH; i++) { + * data[i].a = i; + * data[i].b = i*i; + * data[i].c = 1./(i+1); + * } + * + * ... + * + * s1_tid = H5Tcreate (H5T_COMPOUND, sizeof(s1_t)); + * H5Tinsert(s1_tid, “a_name”, HOFFSET(s1_t, a), H5T_NATIVE_INT); + * H5Tinsert(s1_tid, “b_name”, HOFFSET(s1_t, b), H5T_NATIVE_FLOAT); + * H5Tinsert(s1_tid, “c_name”, HOFFSET(s1_t, c), H5T_NATIVE_DOUBLE); + * + * ... + * + * dataset_id = H5Dcreate(file_id, “SDScompound.h5”, s1_t, + * space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + * H5Dwrite (dataset_id, s1_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, data); + * \endcode + * + * The example below shows the content of the file written on a little-endian machine. + * Create and write a little-endian dataset with a compound datatype in C + * \code + * HDF5 “SDScompound.h5” { + * GROUP “/” { + * DATASET “ArrayOfStructures” { + * DATATYPE H5T_COMPOUND { + * H5T_STD_I32LE “a_name”; + * H5T_IEEE_F32LE “b_name”; + * H5T_IEEE_F64LE “c_name”; + * } + * DATASPACE SIMPLE { ( 3 ) / ( 3 ) } + * DATA { + * (0): { + * 0, + * 0, + * 1 + * }, + * (1): { + * 0, + * 1, + * 0.5 + * }, + * (2): { + * 0, + * 4, + * 0.333333 + * } + * } + * } + * } + * } + * \endcode + * + * It is not necessary to write the whole data at once. Datasets with compound datatypes can be + * written by field or by subsets of fields. In order to do this one has to remember to set the transfer + * property of the dataset using the H5Pset_preserve call and to define the memory datatype that + * corresponds to a field. The example below shows how float and double fields are written to the + * dataset. + * + * Writing floats and doubles to a dataset + * \code + * typedef struct sb_t { + * float b; + * double c; + * } sb_t; + * + * typedef struct sc_t { + * float b; + * double c; + * } sc_t; + * sb_t data1[LENGTH]; + * sc_t data2[LENGTH]; + * + * // Initialize data + * for (i = 0; i < LENGTH; i++) { + * data1.b = i * i; + * data2.c = 1./(i + 1); + * } + * + * ... + * + * // Create dataset as in example 15 + * + * ... + * + * // Create memory datatypes corresponding to float + * // and double datatype fields + * + * sb_tid = H5Tcreate (H5T_COMPOUND, sizeof(sb_t)); + * H5Tinsert(sb_tid, “b_name”, HOFFSET(sb_t, b), H5T_NATIVE_FLOAT); + * sc_tid = H5Tcreate (H5T_COMPOUND, sizeof(sc_t)); + * H5Tinsert(sc_tid, “c_name”, HOFFSET(sc_t, c), H5T_NATIVE_DOUBLE); + * + * ... + * + * // Set transfer property + * xfer_id = H5Pcreate(H5P_DATASET_XFER); + * H5Pset_preserve(xfer_id, 1); + * H5Dwrite (dataset_id, sb_tid, H5S_ALL, H5S_ALL, xfer_id, data1); + * H5Dwrite (dataset_id, sc_tid, H5S_ALL, H5S_ALL, xfer_id, data2); + * \endcode + * + * The figure below shows the content of the file written on a little-endian machine. Only float and + * double fields are written. 
The default fill value is used to initialize the unwritten integer field. + * Writing floats and doubles to a dataset on a little-endian system + * \code + * HDF5 “SDScompound.h5” { + * GROUP “/” { + * DATASET “ArrayOfStructures” { + * DATATYPE H5T_COMPOUND { + * H5T_STD_I32LE “a_name”; + * H5T_IEEE_F32LE “b_name”; + * H5T_IEEE_F64LE “c_name”; + * } + * DATASPACE SIMPLE { ( 3 ) / ( 3 ) } + * DATA { + * (0): { + * 0, + * 0, + * 1 + * }, + * (1): { + * 0, + * 1, + * 0.5 + * }, + * (2): { + * 0, + * 4, + * 0.333333 + * } + * } + * } + * } + * } + * \endcode + * + * The example below contains a Fortran example that creates and writes a dataset with a + * compound datatype. As this example illustrates, writing and reading compound datatypes in + * Fortran is always done by fields. The content of the written file is the same as shown in the + * example above. + * Create and write a dataset with a compound datatype in Fortran + * \code + * ! One cannot write an array of a derived datatype in + * ! Fortran. + * TYPE s1_t + * INTEGER a + * REAL b + * DOUBLE PRECISION c + * END TYPE s1_t + * TYPE(s1_t) d(LENGTH) + * ! Therefore, the following code initializes an array + * ! corresponding to each field in the derived datatype + * ! and writesthose arrays to the dataset + * + * INTEGER, DIMENSION(LENGTH) :: a + * REAL, DIMENSION(LENGTH) :: b + * DOUBLE PRECISION, DIMENSION(LENGTH) :: c + * + * ! Initialize data + * do i = 1, LENGTH + * a(i) = i-1 + * b(i) = (i-1) * (i-1) + * c(i) = 1./i + * enddo + * + * ... + * + * ! Set dataset transfer property to preserve partially + * ! initialized fields during write/read to/from dataset + * ! with compound datatype. + * ! + * CALL h5pcreate_f(H5P_DATASET_XFER_F, plist_id, error) + * CALL h5pset_preserve_f(plist_id, .TRUE., error) + * + * ... + * + * ! + * ! Create compound datatype. + * ! + * ! First calculate total size by calculating sizes of + * ! each member + * ! + * CALL h5tget_size_f(H5T_NATIVE_INTEGER, type_sizei, error) + * CALL h5tget_size_f(H5T_NATIVE_REAL, type_sizer, error) + * CALL h5tget_size_f(H5T_NATIVE_DOUBLE, type_sized, error) + * type_size = type_sizei + type_sizer + type_sized + * CALL h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, error) + * ! + * ! Insert members + * ! + * ! + * ! INTEGER member + * ! + * offset = 0 + * CALL h5tinsert_f(dtype_id, “a_name”, offset, H5T_NATIVE_INTEGER, error) + * ! + * ! REAL member + * ! + * offset = offset + type_sizei + * CALL h5tinsert_f(dtype_id, “b_name”, offset, H5T_NATIVE_REAL, error) + * ! + * ! DOUBLE PRECISION member + * ! + * offset = offset + type_sizer + * CALL h5tinsert_f(dtype_id, “c_name”, offset, H5T_NATIVE_DOUBLE, error) + * ! + * ! Create the dataset with compound datatype. + * ! + * CALL h5dcreate_f(file_id, dsetname, dtype_id, dspace_id, &dset_id, error, H5P_DEFAULT_F, + * H5P_DEFAULT_F, H5P_DEFAULT_F) + * ! + * + * ... + * + * ! Create memory types. We have to create a compound + * ! datatype for each member we want to write. + * ! + * CALL h5tcreate_f(H5T_COMPOUND_F, type_sizei, dt1_id, error) + * offset = 0 + * CALL h5tinsert_f(dt1_id, “a_name”, offset, H5T_NATIVE_INTEGER, error) + * ! + * CALL h5tcreate_f(H5T_COMPOUND_F, type_sizer, dt2_id, error) + * offset = 0 + * CALL h5tinsert_f(dt2_id, “b_name”, offset, H5T_NATIVE_REAL, error) + * ! + * CALL h5tcreate_f(H5T_COMPOUND_F, type_sized, dt3_id, error) + * offset = 0 + * CALL h5tinsert_f(dt3_id, “c_name”, offset, H5T_NATIVE_DOUBLE, error) + * ! + * ! Write data by fields in the datatype. Fields order + * ! is not important. + * ! 
+ * CALL h5dwrite_f(dset_id, dt3_id, c, data_dims, error, xfer_prp = plist_id) + * CALL h5dwrite_f(dset_id, dt2_id, b, data_dims, error, xfer_prp = plist_id) + * CALL h5dwrite_f(dset_id, dt1_id, a, data_dims, error, xfer_prp = plist_id) + * \endcode + * + *

Reading Datasets with Compound Datatypes

+ *
+ * Reading datasets with compound datatypes may be a challenge. For general applications there is
+ * no way to know a priori the corresponding C structure. Also, C structures cannot be allocated on
+ * the fly during discovery of the dataset’s datatype. For general C, C++, Fortran and Java
+ * applications the following steps will be required to read and to interpret data from a dataset with a
+ * compound datatype (a minimal discovery sketch in C follows this list):
+ * \li 1. Get the identifier of the compound datatype in the file with the #H5Dget_type call
+ * \li 2. Find the number of the compound datatype members with the #H5Tget_nmembers call
+ * \li 3. Iterate through compound datatype members
+ *
+ * \li Get member class with the #H5Tget_member_class call
+ * \li Get member name with the #H5Tget_member_name call
+ * \li Check class type against predefined classes:
+ *     #H5T_INTEGER, #H5T_FLOAT, #H5T_STRING, #H5T_BITFIELD, #H5T_OPAQUE,
+ *     #H5T_COMPOUND, #H5T_REFERENCE, #H5T_ENUM, #H5T_VLEN, #H5T_ARRAY
+ * \li If class is #H5T_COMPOUND, then go to step 2 and repeat all steps under step 3. If
+ * class is not #H5T_COMPOUND, then a member is of an atomic class and can be read
+ * to a corresponding buffer after discovering all necessary information specific to each
+ * atomic type (for example, size of the integer or floats, super class for enumerated and
+ * array datatype, and its sizes)
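+ *
+ * The following sketch is a non-authoritative illustration of the discovery steps above; dataset_id is
+ * assumed to be an open dataset, and error checking is omitted.
+ * \code
+ * hid_t ftype    = H5Dget_type(dataset_id);       // step 1: datatype as stored in the file
+ * int   nmembers = H5Tget_nmembers(ftype);        // step 2: number of members
+ *
+ * for (int i = 0; i < nmembers; i++) {            // step 3: iterate through the members
+ *     char        *name  = H5Tget_member_name(ftype, (unsigned)i);
+ *     H5T_class_t  cls   = H5Tget_member_class(ftype, (unsigned)i);
+ *     hid_t        mtype = H5Tget_member_type(ftype, (unsigned)i);
+ *
+ *     if (cls == H5T_COMPOUND) {
+ *         // nested compound member: repeat the same inspection on mtype
+ *     }
+ *     else {
+ *         // atomic member: query whatever is needed to build the memory datatype
+ *         size_t msize = H5Tget_size(mtype);
+ *     }
+ *     H5free_memory(name);
+ *     H5Tclose(mtype);
+ * }
+ * H5Tclose(ftype);
+ * \endcode
+ *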
+ * + * The examples below show how to read a dataset with a known compound datatype. + * + * The first example below shows the steps needed to read data of a known structure. First, build a + * memory datatype the same way it was built when the dataset was created, and then second use + * the datatype in an #H5Dread call. + * + * Read a dataset using a memory datatype + * \code + * typedef struct s1_t { + * int a; + * float b; + * double c; + * } s1_t; + * + * s1_t *data; + * + * ... + * + * s1_tid = H5Tcreate(H5T_COMPOUND, sizeof(s1_t)); + * H5Tinsert(s1_tid, “a_name”, HOFFSET(s1_t, a), H5T_NATIVE_INT); + * H5Tinsert(s1_tid, “b_name”, HOFFSET(s1_t, b), H5T_NATIVE_FLOAT); + * H5Tinsert(s1_tid, “c_name”, HOFFSET(s1_t, c), H5T_NATIVE_DOUBLE); + * + * ... + * + * dataset_id = H5Dopen(file_id, “SDScompound.h5”, H5P_DEFAULT); + * + * ... + * + * data = (s1_t *) malloc (sizeof(s1_t)*LENGTH); + * H5Dread(dataset_id, s1_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, data); + * \endcode + * + * Instead of building a memory datatype, the application could use the + * #H5Tget_native_type function. See the example below. + * + * Read a dataset using H5Tget_native_type + * \code + * typedef struct s1_t { + * int a; + * float b; + * double c; + * } s1_t; + * + * s1_t *data; + * hid_t file_s1_t, mem_s1_t; + * + * ... + * + * dataset_id = H5Dopen(file_id, “SDScompound.h5”, H5P_DEFAULT); + * // Discover datatype in the file + * file_s1_t = H5Dget_type(dataset_id); + * // Find corresponding memory datatype + * mem_s1_t = H5Tget_native_type(file_s1_t, H5T_DIR_DEFAULT); + * + * ... + * + * data = (s1_t *) malloc (sizeof(s1_t)*LENGTH); + * H5Dread (dataset_id,mem_s1_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, data); + * \endcode + * + * The example below shows how to read just one float member of a compound datatype. + * + * Read one floating point member of a compound datatype + * \code + * typedef struct sf_t { + * float b; + * } sf_t; + * + * sf_t *data; + * + * ... + * + * sf_tid = H5Tcreate(H5T_COMPOUND, sizeof(sf_t)); + * H5Tinsert(sf_tid, “b_name”, HOFFSET(sf_t, b), H5T_NATIVE_FLOAT); + * + * ... + * + * dataset_id = H5Dopen(file_id, “SDScompound.h5”, H5P_DEFAULT); + * + * ... + * + * data = (sf_t *) malloc (sizeof(sf_t) * LENGTH); + * H5Dread(dataset_id, sf_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, data); + * \endcode + * + * The example below shows how to read float and double members of a compound datatype into a + * structure that has those fields in a different order. Please notice that #H5Tinsert calls can be used + * in an order different from the order of the structure’s members. + * + * Read float and double members of a compound datatype + * \code + * typedef struct sdf_t { + * double c; + * float b; + * } sdf_t; + * + * sdf_t *data; + * + * ... + * + * sdf_tid = H5Tcreate(H5T_COMPOUND, sizeof(sdf_t)); + * H5Tinsert(sdf_tid, “b_name”, HOFFSET(sdf_t, b), H5T_NATIVE_FLOAT); + * H5Tinsert(sdf_tid, “c_name”, HOFFSET(sdf_t, c), H5T_NATIVE_DOUBLE); + * + * ... + * + * dataset_id = H5Dopen(file_id, “SDScompound.h5”, H5P_DEFAULT); + * + * ... + * + * data = (sdf_t *) malloc (sizeof(sdf_t) * LENGTH); + * H5Dread(dataset_id, sdf_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, data); + * \endcode + * + *

Array

+ * + * Many scientific datasets have multiple measurements for each point in a space. There are several + * natural ways to represent this data, depending on the variables and how they are used in + * computation. See the table and the figure below. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Representing data with multiple measurements
+ * Storage Strategy
+ *
+ * Stored as
+ *
+ * Remarks
+ *
Multiple planes
+ *
+ * Several datasets with identical dataspaces
+ *
+ * This is optimal when variables are accessed individually, or when only selected variables are
+ * frequently used.
+ *
+ * Additional dimension
+ *
+ * One dataset, the last “dimension” is a vector of variables
+ *
+ * This can give good performance, although selecting only a few variables may be slow. This may
+ * not reflect the science.
+ *
+ * Record with multiple values + * + * One dataset with compound datatype + * + * This enables the variables to be read all together or selected. Also handles “vectors” of + * heterogeneous data. + *
+ * Vector or Tensor value + * + * One dataset, each data element is a small array of values. + * + * This uses the same amount of space as the previous two, and may represent the science model + * better. + *
Figure 13 Representing data with multiple measurements
+ * \image html Dtypes_fig13a.gif + * + * \image html Dtypes_fig13b.gif + *
+ * \image html Dtypes_fig13c.gif + * + * \image html Dtypes_fig13d.gif + *
+ * + * The HDF5 #H5T_ARRAY datatype defines the data element to be a homogeneous, multi-dimensional array. + * See Figure 13 above. The elements of the array can be any HDF5 datatype + * (including compound and array), and the size of the datatype is the total size of the array. A + * dataset of array datatype cannot be subdivided for I/O within the data element: the entire array of + * the data element must be transferred. If the data elements need to be accessed separately, for + * example, by plane, then the array datatype should not be used. The table below shows + * advantages and disadvantages of various storage methods. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Storage method advantages and disadvantages
+ * Method
+ *
+ * Advantages
+ *
+ * Disadvantages
+ *
+ * Multiple Datasets + * + * Easy to access each plane, can select any plane(s) + * + * Less efficient to access a ‘column’ through the planes + *
+ * N+1 Dimension + * + * All access patterns supported + * + * Must be homogeneous datatype
+ * The added dimension may not make sense in the scientific model + *
+ * Compound Datatype + * + * Can be heterogeneous datatype + * + * Planes must be named, selection is by plane
+ * Not a natural representation for a matrix + *
+ * Array + * + * A natural representation for vector or tensor data + * + * Cannot access elements separately (no access by plane) + *
+ *
+ * An array datatype may be multi-dimensional with 1 to #H5S_MAX_RANK (the maximum rank
+ * of a dataset is currently 32) dimensions. The dimensions can be any size greater than 0, but
+ * unlimited dimensions are not supported (although the datatype can be a variable-length datatype).
+ *
+ * An array datatype is created with the #H5Tarray_create call, which specifies the number of
+ * dimensions, the size of each dimension, and the base type of the array. The array datatype can
+ * then be used in any way that any datatype object is used. The example below shows the creation
+ * of a datatype that is a two-dimensional array of native integers, and this is then used to create a
+ * dataset. Note that the dataset can have a dataspace with any number and size of dimensions. The figure
+ * below shows the layout in memory assuming that the native integers are 4 bytes. Each
+ * data element has 6 elements, for a total of 24 bytes.
+ *
+ * Create a two-dimensional array datatype
+ * \code
+ * hid_t file, dataset;
+ * hid_t datatype, dataspace;
+ * hsize_t adims[] = {3, 2};
+ *
+ * datatype = H5Tarray_create(H5T_NATIVE_INT, 2, adims);
+ *
+ * dataset = H5Dcreate(file, datasetname, datatype,
+ *                     dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ * \endcode
+ *
+ * \image html Dtypes_fig14.gif "Memory layout of a two-dimensional array datatype" + *
+ * + * @anchor h4_vlen_datatype

Variable-length Datatypes

+ *
+ * A variable-length (VL) datatype is a one-dimensional sequence of a datatype which is not fixed
+ * in length from one dataset location to another. In other words, each data element may have a
+ * different number of members. Variable-length datatypes cannot be divided; the entire data
+ * element must be transferred.
+ *
+ * VL datatypes are useful to the scientific community in many different ways, possibly including:
+ *
+ * \li Ragged arrays: Multi-dimensional ragged arrays can be implemented with the last (fastest changing)
+ * dimension being ragged by using a VL datatype as the type of the element stored.
+ * \li Fractal arrays: A nested VL datatype can be used to implement ragged arrays of ragged arrays, to
+ * whatever nesting depth is required for the user.
+ * \li Polygon lists: A common storage requirement is to efficiently store arrays of polygons with
+ * different numbers of vertices. A VL datatype can be used to efficiently and succinctly describe
+ * an array of polygons with different numbers of vertices.
+ * \li Character strings: Perhaps the most common use of VL datatypes will be to store C-like VL
+ * character strings in dataset elements or as attributes of objects.
+ * \li Indices (for example, of objects within a file): An array of VL object references could be used
+ * as an index to all the objects in a file which contain a particular sequence of dataset values.
+ * \li Object Tracking: An array of VL dataset region references can be used as a method of tracking
+ * objects or features appearing in a sequence of datasets.
+ *
+ * A VL datatype is created by calling #H5Tvlen_create which specifies the base datatype. The first
+ * example below shows an example of code that creates a VL datatype of unsigned integers. Each
+ * data element is a one-dimensional array of zero or more members and is stored in the
+ * hvl_t structure. See the second example below.
+ *
+ * Create a variable-length datatype of unsigned integers
+ * \code
+ * tid1 = H5Tvlen_create (H5T_NATIVE_UINT);
+ *
+ * dataset = H5Dcreate(fid1, "Dataset1", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ * \endcode
+ *
+ * Data element storage for members of the VL datatype
+ * \code
+ * typedef struct
+ * {
+ *     size_t len; // Length of VL data
+ *                 // (in base type units)
+ *     void *p;    // Pointer to VL data
+ * } hvl_t;
+ * \endcode
+ *
+ * The first example below shows how the VL data is written. For each of the 10 data elements, a
+ * length and data buffer must be allocated. Below the two examples is a figure that shows how the
+ * data is laid out in memory.
+ *
+ * An analogous procedure must be used to read the data. See the second example below. An
+ * appropriate array of hvl_t must be allocated, and the data read. It is then traversed one data
+ * element at a time. The #H5Dvlen_reclaim call frees the memory buffers holding the VL data. With each
+ * element possibly being of different sequence lengths for a dataset with a VL datatype, the
+ * memory for the VL datatype must be dynamically allocated. Currently there are two methods of
+ * managing the memory for VL datatypes: the standard C malloc/free memory allocation routines
+ * or a method of calling user-defined memory management routines to allocate or free memory
+ * (set with #H5Pset_vlen_mem_manager). Since the memory allocated when reading (or writing)
+ * may be complicated to release, the #H5Dvlen_reclaim function is provided to traverse a memory
+ * buffer and free the VL datatype information without leaking memory.
+ *
+ * Write VL data
+ * \code
+ * hvl_t wdata[10]; // Information to write
+ *
+ * // Allocate and initialize VL data to write
+ * for(i = 0; i < 10; i++) {
+ *     wdata[i].p = malloc((i + 1) * sizeof(unsigned int));
+ *     wdata[i].len = i + 1;
+ *     for(j = 0; j < (i + 1); j++)
+ *         ((unsigned int *)wdata[i].p)[j] = i * 10 + j;
+ * }
+ * ret = H5Dwrite(dataset, tid1, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+ * \endcode
+ *
+ * Read VL data
+ * \code
+ * hvl_t rdata[SPACE1_DIM1];
+ * ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, xfer_pid, rdata);
+ *
+ * for(i = 0; i < SPACE1_DIM1; i++) {
+ *     printf("%d: len %d ", i, (int)rdata[i].len);
+ *     for(j = 0; j < rdata[i].len; j++) {
+ *         printf(" value: %u\n", ((unsigned int *)rdata[i].p)[j]);
+ *     }
+ * }
+ * ret = H5Dvlen_reclaim(tid1, sid1, xfer_pid, rdata);
+ * \endcode
+ *
+ * \image html Dtypes_fig15.gif "Memory layout of a VL datatype" + *
+ *
+ * The user program must carefully manage these relatively complex data structures. The
+ * #H5Dvlen_reclaim function performs a standard traversal, freeing all the data. This function
+ * analyzes the datatype and dataspace objects, and visits each VL data element, recursing through
+ * nested types. By default, the system free is called for the pointer in each hvl_t. Obviously, this
+ * call assumes that all of this memory was allocated with the system malloc.
+ *
+ * The user program may specify custom memory manager routines, one for allocating and one for
+ * freeing. These may be set with #H5Pset_vlen_mem_manager, and must have the following
+ * prototypes (a short usage sketch follows them):
+ *
+ * \code
+ * typedef void *(*H5MM_allocate_t)(size_t size, void *info);
+ *
+ * typedef void (*H5MM_free_t)(void *mem, void *free_info);
+ * \endcode
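+ *
+ * A minimal sketch of installing such routines on a transfer property list; my_vlen_alloc,
+ * my_vlen_free, and the byte counter are hypothetical names used only for this illustration.
+ * \code
+ * // Hypothetical routines matching the prototypes above
+ * void *my_vlen_alloc(size_t size, void *info)
+ * {
+ *     *(size_t *)info += size;   // track how many bytes have been handed out
+ *     return malloc(size);
+ * }
+ *
+ * void my_vlen_free(void *mem, void *free_info)
+ * {
+ *     free(mem);
+ * }
+ *
+ * // ... later, before reading the VL dataset:
+ * size_t vlen_bytes = 0;
+ * hid_t  xfer_pid   = H5Pcreate(H5P_DATASET_XFER);
+ * H5Pset_vlen_mem_manager(xfer_pid, my_vlen_alloc, &vlen_bytes, my_vlen_free, &vlen_bytes);
+ * \endcode
+ * When this property list is used in #H5Dread, the library allocates each VL buffer with
+ * my_vlen_alloc, and a later #H5Dvlen_reclaim with the same property list releases the buffers
+ * through my_vlen_free.
+ *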
+ * The utility function #H5Dvlen_get_buf_size checks the number of bytes required to store the VL
+ * data from the dataset. This function analyzes the datatype and dataspace objects and visits all the VL
+ * data elements to determine the number of bytes required to store the VL data in the
+ * destination storage (memory). The size value is adjusted for data conversion and alignment in the
+ * destination.
+ *
+ * \subsection subsec_datatype_other Other Non-numeric Datatypes
+ * Several datatype classes define special types of objects.
+ *
+ * \subsubsection subsubsec_datatype_other_strings Strings
+ * Text data is represented by arrays of characters, called strings. Many programming languages
+ * support different conventions for storing strings, which may be fixed or variable-length, and may
+ * have different rules for padding unused storage. HDF5 can represent strings in several ways. See
+ * the figure below.
+ *
+ * The strings to store are “Four score” and “lazy programmers.”
+ *
A string stored as one-character elements in a one-dimensional array
+ * a) #H5T_NATIVE_CHAR: The dataset is a one-dimensional array with 29 elements, and each element + * is a single character. + *
+ * \image html Dtypes_fig16a.gif + *
+ * b) Fixed-length string: The dataset is a one-dimensional array with two elements, and each + * element is 20 characters. + *
+ * \image html Dtypes_fig16b.gif + *
+ * c) Variable-length string: The dataset is a one-dimensional array with two elements, and each + * element is a variable-length string. This is the same result when stored as a fixed-length + * string except that the first element of the array will need only 11 bytes for storage instead of 20. + *
+ * \image html Dtypes_fig16c.gif + *
+ * \image html Dtypes_fig16d.gif + *
+ * + * First, a dataset may have a dataset with datatype #H5T_NATIVE_CHAR with each character of + * the string as an element of the dataset. This will store an unstructured block of text data, but + * gives little indication of any structure in the text. See item a in the figure above. + * + * A second alternative is to store the data using the datatype class #H5T_STRING with each + * element a fixed length. See item b in the figure above. In this approach, each element might be a + * word or a sentence, addressed by the dataspace. The dataset reserves space for the specified + * number of characters, although some strings may be shorter. This approach is simple and usually + * is fast to access, but can waste storage space if the length of the Strings varies. + * + * A third alternative is to use a variable-length datatype. See item c in the figure above. This can + * be done using the standard mechanisms described above. The program would use vl_t structures + * to write and read the data. + * + * A fourth alternative is to use a special feature of the string datatype class to set the size of the + * datatype to #H5T_VARIABLE. See item c in the figure above. The example below shows a + * declaration of a datatype of type #H5T_C_S1 which is set to #H5T_VARIABLE. The HDF5 + * Library automatically translates between this and the vl_t structure. Note: the #H5T_VARIABLE + * size can only be used with string datatypes. + * Set the string datatype size to H5T_VARIABLE + * \code + * tid1 = H5Tcopy (H5T_C_S1); + * ret = H5Tset_size (tid1, H5T_VARIABLE); + * \endcode + * + * Variable-length strings can be read into C strings (in other words, pointers to zero terminated + * arrays of char). See the example below. + * Read variable-length strings into C strings + * \code + * char *rdata[SPACE1_DIM1]; + * + * ret = H5Dread(dataset, tid1, H5S_ALL, H5S_ALL, xfer_pid, rdata); + * + * for(i = 0; i < SPACE1_DIM1; i++) { + * printf(“%d: len: %d, str is: %s\n”, i, strlen(rdata[i]), rdata[i]); + * } + * + * ret = H5Dvlen_reclaim(tid1, sid1, xfer_pid, rdata); + * \endcode + * + * \subsubsection subsubsec_datatype_other_refs Reference + * In HDF5, objects (groups, datasets, and committed datatypes) are usually accessed by name. + * There is another way to access stored objects - by reference. There are two reference datatypes: + * object reference and region reference. Object reference objects are created with #H5Rcreate and + * other calls (cross reference). These objects can be stored and retrieved in a dataset as elements + * with reference datatype. The first example below shows an example of code that creates + * references to four objects, and then writes the array of object references to a dataset. The second + * example below shows a dataset of datatype reference being read and one of the reference objects + * being dereferenced to obtain an object pointer. + * + * In order to store references to regions of a dataset, the datatype should be #H5T_STD_REF_DSETREG. + * Note that a data element must be either an object reference or a region reference: these are + * different types and cannot be mixed within a single array. + * + * A reference datatype cannot be divided for I/O: an element is read or written completely. 
+ * + * Create object references and write to a dataset + * \code + * dataset= H5Dcreate (fid1, “Dataset3”, H5T_STD_REF_OBJ, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + * + * // Create reference to dataset + * ret = H5Rcreate(&wbuf[0], fid1,“/Group1/Dataset1”, H5R_OBJECT, -1); + * + * // Create reference to dataset + * ret = H5Rcreate(&wbuf[1], fid1, “/Group1/Dataset2”, H5R_OBJECT, -1); + * + * // Create reference to group + * ret = H5Rcreate(&wbuf[2], fid1, “/Group1”, H5R_OBJECT, -1); + * + * // Create reference to committed datatype + * ret = H5Rcreate(&wbuf[3], fid1, “/Group1/Datatype1”, H5R_OBJECT, -1); + * + * // Write selection to disk + * ret=H5Dwrite(dataset, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf); + * \endcode + * + * Read a dataset with a reference datatype + * \code + * rbuf = malloc(sizeof(hobj_ref_t)*SPACE1_DIM1); + * + * // Read selection from disk + * ret=H5Dread(dataset, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf); + * + * // Open dataset object + * dset2 = H5Rdereference(dataset, H5R_OBJECT, &rbuf[0]); + * \endcode + * + * \subsubsection subsubsec_datatype_other_enum ENUM + * The enum datatype implements a set of (name, value) pairs, similar to C/C++ enum. The values + * are currently limited to native integer datatypes. Each name can be the name of only one value, + * and each value can have only one name. + * + * The data elements of the ENUMERATION are stored according to the datatype. An example + * would be as an array of integers. The example below shows an example of how to create an + * enumeration with five elements. The elements map symbolic names to 2-byte integers. See the + * table below. + * Create an enumeration with five elements + * \code + * hid_t hdf_en_colors; + * short val; + * + * hdf_en_colors = H5Tcreate(H5T_ENUM, sizeof(short)); + * H5Tenum_insert(hdf_en_colors, “RED”, (val=0, &val)); + * H5Tenum_insert(hdf_en_colors, “GREEN”, (val=1, &val)); + * H5Tenum_insert(hdf_en_colors, “BLUE”, (val=2, &val)); + * H5Tenum_insert(hdf_en_colors, “WHITE”, (val=3, &val)); + * H5Tenum_insert(hdf_en_colors, “BLACK”, (val=4, &val)); + * H5Dcreate(fileid, datasetname, hdf_en_colors, spaceid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + * \endcode + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
An enumeration with five elements
+ * <table>
+ * <tr><th>Name</th><th>Value</th></tr>
+ * <tr><td>RED</td><td>0</td></tr>
+ * <tr><td>GREEN</td><td>1</td></tr>
+ * <tr><td>BLUE</td><td>2</td></tr>
+ * <tr><td>WHITE</td><td>3</td></tr>
+ * <tr><td>BLACK</td><td>4</td></tr>
+ * </table>
+ *
+ * The figure below shows how an array of eight values might be stored. Conceptually, the array is
+ * an array of symbolic names [BLACK, RED, WHITE, BLUE, ...]. See item a in the figure below.
+ * These are stored as the values and are short integers. So, the first 2 bytes are the value associated
+ * with “BLACK”, which is the number 4, and so on. See item b in the figure below. A sketch that
+ * recovers the symbolic names from the stored values follows the figure.
+ *
Storing an enum array
+ * a) Logical data to be written - eight elements + *
+ * \image html Dtypes_fig17a.gif + *
+ * b) The storage layout. Total size of the array is 16 bytes, 2 bytes per element. + *
+ * \image html Dtypes_fig17b.gif + *
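+ *
+ * The sketch below recovers the symbolic names from stored values using #H5Tenum_nameof; it
+ * assumes the hdf_en_colors datatype from the example above and an open dataset dataset_id, and
+ * the buffer sizes are illustrative.
+ * \code
+ * short colors[8];
+ * char  name[16];
+ *
+ * H5Dread(dataset_id, hdf_en_colors, H5S_ALL, H5S_ALL, H5P_DEFAULT, colors);
+ * for (int i = 0; i < 8; i++) {
+ *     H5Tenum_nameof(hdf_en_colors, &colors[i], name, sizeof(name));  // value -> symbolic name
+ *     printf("%d -> %s\n", colors[i], name);
+ * }
+ * \endcode
+ *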
+ * + * The order that members are inserted into an enumeration type is unimportant; the important part + * is the associations between the symbol names and the values. Thus, two enumeration datatypes + * will be considered equal if and only if both types have the same symbol/value associations and + * both have equal underlying integer datatypes. Type equality is tested with the H5Tequal + * function. + * + * If a particular architecture type is required, a little-endian or big-endian datatype for example, + * use a native integer datatype as the ENUM base datatype and use #H5Tconvert on values as they + * are read from or written to a dataset. + * + * \subsubsection subsubsec_datatype_other_opaque Opaque + * In some cases, a user may have data objects that should be stored and retrieved as blobs with no + * attempt to interpret them. For example, an application might wish to store an array of encrypted + * certificates which are 100 bytes long. + * + * While an arbitrary block of data may always be stored as bytes, characters, integers, or whatever, + * this might mislead programs about the meaning of the data. The opaque datatype defines data + * elements which are uninterpreted by HDF5. The opaque data may be labeled with + * #H5Tset_tag with a string that might be used by an application. For example, the encrypted + * certificates might have a tag to indicate the encryption and the certificate standard. + * + * \subsubsection subsubsec_datatype_other_bitfield Bitfield + * Some data is represented as bits, where the number of bits is not an integral byte and the bits are + * not necessarily interpreted as a standard type. Some examples might include readings from + * machine registers (for example, switch positions), a cloud mask, or data structures with several + * small integers that should be store in a single byte. + * + * This data could be stored as integers, strings, or enumerations. However, these storage methods + * would likely result in considerable wasted space. For example, storing a cloud mask with one + * byte per value would use up to eight times the space of a packed array of bits. + * + * The HDF5 bitfield datatype class defines a data element that is a contiguous sequence of bits, + * which are stored on disk in a packed array. The programming model is the same as for unsigned + * integers: the datatype object is created by copying a predefined datatype, and then the precision, + * offset, and padding are set. + * + * While the use of the bitfield datatype will reduce storage space substantially, there will still be + * wasted space if the bitfield as a whole does not match the 1-, 2-, 4-, or 8-byte unit in which it is + * written. The remaining unused space can be removed by applying the N-bit filter to the dataset + * containing the bitfield data. For more information, see "Using the N-bit Filter." + * + * \subsection subsec_datatype_fill Fill Values + * The “fill value” for a dataset is the specification of the default value assigned to data elements + * that have not yet been written. In the case of a dataset with an atomic datatype, the fill value is a + * single value of the appropriate datatype, such as ‘0’ or ‘-1.0’. In the case of a dataset with a + * composite datatype, the fill value is a single data element of the appropriate type. For example, + * for an array or compound datatype, the fill value is a single data element with values for all the + * component elements of the array or compound datatype. 
+ * + * The fill value is set (permanently) when the dataset is created. The fill value is set in the dataset + * creation properties in the #H5Dcreate call. Note that the #H5Dcreate call must also include the + * datatype of the dataset, and the value provided for the fill value will be interpreted as a single + * element of this datatype. The example below shows code which creates a dataset of integers with + * fill value -1. Any unwritten data elements will be set to -1. + * + * Create a dataset with a fill value of -1 + * \code + * hid_t plist_id; + * int filler; + * + * filler = -1; + * plist_id = H5Pcreate(H5P_DATASET_CREATE); + * H5Pset_fill_value(plist_id, H5T_NATIVE_INT, &filler); + * + * // Create the dataset with fill value ‘-1’. + * dataset_id = H5Dcreate(file_id, “/dset”, H5T_STD_I32BE, dataspace_id, H5P_DEFAULT, plist_id, + * H5P_DEFAULT); + * \endcode + * + * Create a fill value for a compound datatype + * \code + * typedef struct s1_t { + * int a; + * char b; + * double c; + * } s1_t; + * s1_t filler; + * + * s1_tid = H5Tcreate (H5T_COMPOUND, sizeof(s1_t)); + * H5Tinsert(s1_tid, “a_name”, HOFFSET(s1_t, a), H5T_NATIVE_INT); + * H5Tinsert(s1_tid, “b_name”, HOFFSET(s1_t, b), H5T_NATIVE_CHAR); + * H5Tinsert(s1_tid, “c_name”, HOFFSET(s1_t, c), H5T_NATIVE_DOUBLE); + * + * filler.a = -1; + * filler.b = ‘*’; + * filler.c = -2.0; + * plist_id = H5Pcreate(H5P_DATASET_CREATE); + * H5Pset_fill_value(plist_id, s1_tid, &filler); + * + * // Create the dataset with fill value + * // (-1, ‘*’, -2.0). + * dataset = H5Dcreate(file, datasetname, s1_tid, space, H5P_DEFAULT, plist_id, H5P_DEFAULT); + * \endcode + * + * The code above shows how to create a fill value for a compound datatype. The procedure is the + * same as the previous example except the filler must be a structure with the correct fields. Each + * field is initialized to the desired fill value. + * + * The fill value for a dataset can be retrieved by reading the dataset creation properties of the + * dataset and then by reading the fill value with #H5Pget_fill_value. The data will be read into + * memory using the storage layout specified by the datatype. This transfer will convert data in the + * same way as #H5Dread. The example below shows how to get the fill value from the dataset + * created in the example "Create a dataset with a fill value of -1". + * + * Retrieve a fill value + * \code + * hid_t plist2; + * int filler; + * + * dataset_id = H5Dopen(file_id, “/dset”, H5P_DEFAULT); + * plist2 = H5Dget_create_plist(dataset_id); + * + * H5Pget_fill_value(plist2, H5T_NATIVE_INT, &filler); + * + * // filler has the fill value, ‘-1’ + * \endcode + * + * A similar procedure is followed for any datatype. The example below shows how to read the fill + * value for the compound datatype created in an example above. Note that the program must pass + * an element large enough to hold a fill value of the datatype indicated by the argument to + * #H5Pget_fill_value. Also, the program must understand the datatype in order to interpret its + * components. This may be difficult to determine without knowledge of the application that + * created the dataset. 
+ * + * Read the fill value for a compound datatype + * \code + * char *fillbuf; + * int sz; + * + * dataset = H5Dopen( file, DATASETNAME, H5P_DEFAULT); + * + * s1_tid = H5Dget_type(dataset); + * + * sz = H5Tget_size(s1_tid); + * + * fillbuf = (char *)malloc(sz); + * + * plist_id = H5Dget_create_plist(dataset); + * + * H5Pget_fill_value(plist_id, s1_tid, fillbuf); + * + * printf(“filler.a: %d\n”,((s1_t *) fillbuf)->a); + * printf(“filler.b: %c\n”,((s1_t *) fillbuf)->b); + * printf(“filler.c: %f\n”,((s1_t *) fillbuf)->c); + * \endcode + * + * \subsection subsec_datatype_complex Complex Combinations of Datatypes + * Several composite datatype classes define collections of other datatypes, including other + * composite datatypes. In general, a datatype can be nested to any depth, with any combination of + * datatypes. + * + * For example, a compound datatype can have members that are other compound datatypes, arrays, + * VL datatypes. An array can be an array of array, an array of compound, or an array of VL. And a + * VL datatype can be a variable-length array of compound, array, or VL datatypes. + * + * These complicated combinations of datatypes form a logical tree, with a single root datatype, and + * leaves which must be atomic datatypes (predefined or user-defined). The figure below shows an + * example of a logical tree describing a compound datatype constructed from different datatypes. + * + * Recall that the datatype is a description of the layout of storage. The complicated compound + * datatype is constructed from component datatypes, each of which describes the layout of part of + * the storage. Any datatype can be used as a component of a compound datatype, with the + * following restrictions: + *
+ * \li No byte can be part of more than one component datatype (in other words, the fields cannot
+ *     overlap within the compound datatype)
+ * \li The total size of the components must be less than or equal to the total size of the compound
+ *     datatype
+ *
+ * These restrictions are essentially the rules for C structures and similar record types familiar from
+ * programming languages. Multiple typing, such as a C union, is not allowed in HDF5 datatypes.
+ *
+ * \image html Dtypes_fig18.gif "A compound datatype built with different datatypes" + *
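+ *
+ * As an illustration of such nesting, the sketch below (the struct and member names are
+ * illustrative and not taken from the figure above) builds a variable-length datatype whose base
+ * type is a compound datatype using #H5Tvlen_create:
+ * \code
+ * typedef struct point_t {
+ *     double x;
+ *     double y;
+ * } point_t;
+ * hid_t point_tid, vl_tid;
+ *
+ * // Compound datatype describing one point
+ * point_tid = H5Tcreate(H5T_COMPOUND, sizeof(point_t));
+ * H5Tinsert(point_tid, "x", HOFFSET(point_t, x), H5T_NATIVE_DOUBLE);
+ * H5Tinsert(point_tid, "y", HOFFSET(point_t, y), H5T_NATIVE_DOUBLE);
+ *
+ * // Variable-length sequences of points: a VL datatype nested over a compound datatype
+ * vl_tid = H5Tvlen_create(point_tid);
+ * \endcode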
+ *
+ * \subsubsection subsubsec_datatype_complex_create Creating a Complicated Compound Datatype
+ * To construct a complicated compound datatype, each component is constructed, and then added
+ * to the enclosing datatype description. The example below shows how to create a compound
+ * datatype with four members:
+ * \li "T1", a compound datatype with three members
+ * \li "T2", a compound datatype with two members
+ * \li "T3", a one-dimensional array of integers
+ * \li "T4", a string
+ *
+ * Below the example code is a figure that shows this datatype as a logical tree. The output of the
+ * h5dump utility is shown in the example below the figure.
+ *
+ * Each datatype is created as a separate datatype object. Figure "The storage layout for the
+ * four member datatypes" below shows the storage layout
+ * for the four individual datatypes. Then the datatypes are inserted into the outer datatype at an
+ * appropriate offset. Figure "The storage layout of the combined four members" below shows the
+ * resulting storage layout. The combined record is 89 bytes long.
+ *
+ * The dataset is created using the combined compound datatype. The dataset is declared to be a 4
+ * by 3 array of compound data. Each data element is an instance of the 89-byte compound
+ * datatype. Figure "The layout of the dataset" below shows the layout of the dataset, and expands
+ * one of the elements to show the relative position of the component data elements.
+ *
+ * Each data element is a compound datatype, which can be written or read as a record, or each
+ * field may be read or written individually. The first field ("T1") is itself a compound datatype
+ * with three fields ("T1.a", "T1.b", and "T1.c"). "T1" can be read or written as a record, or
+ * individual fields can be accessed. Similarly, the second field is a compound datatype with two
+ * fields ("T2.f1", "T2.f2").
+ *
+ * The third field ("T3") is an array datatype. Thus, "T3" should be accessed as an array of 10
+ * integers. Array data can only be read or written as a single element, so all 10 integers must be
+ * read or written to the third field. The fourth field ("T4") is a single string of length 25.
+ *
+ * Create a compound datatype with four members
+ * \code
+ * typedef struct s1_t {
+ * int a;
+ * char b;
+ * double c;
+ * } s1_t;
+ * typedef struct s2_t {
+ * float f1;
+ * float f2;
+ * } s2_t;
+ * hid_t s1_tid, s2_tid, s3_tid, s4_tid, s5_tid;
+ *
+ * // Create a datatype for s1
+ * s1_tid = H5Tcreate (H5T_COMPOUND, sizeof(s1_t));
+ * H5Tinsert(s1_tid, "a_name", HOFFSET(s1_t, a), H5T_NATIVE_INT);
+ * H5Tinsert(s1_tid, "b_name", HOFFSET(s1_t, b), H5T_NATIVE_CHAR);
+ * H5Tinsert(s1_tid, "c_name", HOFFSET(s1_t, c), H5T_NATIVE_DOUBLE);
+ *
+ * // Create a datatype for s2.
+ * s2_tid = H5Tcreate (H5T_COMPOUND, sizeof(s2_t));
+ * H5Tinsert(s2_tid, "f1", HOFFSET(s2_t, f1), H5T_NATIVE_FLOAT);
+ * H5Tinsert(s2_tid, "f2", HOFFSET(s2_t, f2), H5T_NATIVE_FLOAT);
+ *
+ * // Create a datatype for an Array of integers
+ * s3_tid = H5Tarray_create(H5T_NATIVE_INT, RANK, dim);
+ *
+ * // Create a datatype for a String of 25 characters
+ * s4_tid = H5Tcopy(H5T_C_S1);
+ * H5Tset_size(s4_tid, 25);
+ *
+ * // Create a compound datatype composed of one of each of these types.
+ * // The total size is the sum of the size of each.
+ * sz = H5Tget_size(s1_tid) + H5Tget_size(s2_tid) + H5Tget_size(s3_tid) + H5Tget_size(s4_tid);
+ * s5_tid = H5Tcreate (H5T_COMPOUND, sz);
+ *
+ * // Insert the component types at the appropriate offsets.
+ * H5Tinsert(s5_tid, "T1", 0, s1_tid);
+ * H5Tinsert(s5_tid, "T2", sizeof(s1_t), s2_tid);
+ * H5Tinsert(s5_tid, "T3", sizeof(s1_t) + sizeof(s2_t), s3_tid);
+ * H5Tinsert(s5_tid, "T4", (sizeof(s1_t) + sizeof(s2_t) + H5Tget_size(s3_tid)), s4_tid);
+ *
+ * // Create the dataset with this datatype.
+ * dataset = H5Dcreate(file, DATASETNAME, s5_tid, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ * \endcode
+ *
+ * \image html Dtypes_fig19.gif "Logical tree for the compound datatype with four members" + *
+ *
+ * Output from h5dump for the compound datatype
+ * \code
+ * DATATYPE H5T_COMPOUND {
+ * H5T_COMPOUND {
+ * H5T_STD_I32LE "a_name";
+ * H5T_STD_I8LE "b_name";
+ * H5T_IEEE_F64LE "c_name";
+ * } "T1";
+ * H5T_COMPOUND {
+ * H5T_IEEE_F32LE "f1";
+ * H5T_IEEE_F32LE "f2";
+ * } "T2";
+ * H5T_ARRAY { [10] H5T_STD_I32LE } "T3";
+ * H5T_STRING {
+ * STRSIZE 25;
+ * STRPAD H5T_STR_NULLTERM;
+ * CSET H5T_CSET_ASCII;
+ * CTYPE H5T_C_S1;
+ * } "T4";
+ * }
+ * \endcode
+ *
+ * The storage layout for the four member datatypes
+ * a) Compound type 's1_t', size 16 bytes.
+ *
+ * \image html Dtypes_fig20a.gif
+ *
+ * b) Compound type 's2_t', size 8 bytes.
+ *
+ * \image html Dtypes_fig20b.gif
+ *
+ * c) Array type 's3_tid', 10 integers, total size 40 bytes.
+ *
+ * \image html Dtypes_fig20c.gif
+ *
+ * d) String type 's4_tid', size 25 bytes.
+ *
+ * \image html Dtypes_fig20d.gif
+ *
+ * + * + * + * + * + *
+ * \image html Dtypes_fig21.gif "The storage layout of the combined four members" + *
+ * + * \li A 4 x 3 array of Compound Datatype + * \li Element [1,1] expanded + * + * + * + * + *
+ * \image html Dtypes_fig22.gif "The layout of the dataset" + *
+ *
+ * \subsubsection subsubsec_datatype_complex_analyze Analyzing and Navigating a Compound Datatype
+ * A complicated compound datatype can be analyzed piece by piece to discover the exact storage
+ * layout. In the example above, the outer datatype is analyzed to discover that it is a compound
+ * datatype with four members. Each member is analyzed in turn to construct a complete map of the
+ * storage layout.
+ *
+ * The example below shows code that partially analyzes a nested compound
+ * datatype. The name and overall offset and size of the component datatype are discovered, and then
+ * its type is analyzed depending on the datatype class. Through this method, the complete storage
+ * layout can be discovered.
+ *
+ * Analyzing a nested compound datatype
+ * \code
+ * s1_tid = H5Dget_type(dataset);
+ *
+ * if (H5Tget_class(s1_tid) == H5T_COMPOUND) {
+ *     printf("COMPOUND DATATYPE {\n");
+ *     sz = H5Tget_size(s1_tid);
+ *     nmemb = H5Tget_nmembers(s1_tid);
+ *     printf(" %d bytes\n", sz);
+ *     printf(" %d members\n", nmemb);
+ *     for (i = 0; i < nmemb; i++) {
+ *         s2_tid = H5Tget_member_type(s1_tid, i);
+ *         if (H5Tget_class(s2_tid) == H5T_COMPOUND) {
+ *             // recursively analyze the nested type.
+ *         }
+ *         else if (H5Tget_class(s2_tid) == H5T_ARRAY) {
+ *             sz2 = H5Tget_size(s2_tid);
+ *             printf(" %s: NESTED ARRAY DATATYPE offset %d size %d {\n",
+ *                    H5Tget_member_name(s1_tid, i), H5Tget_member_offset(s1_tid, i), sz2);
+ *             H5Tget_array_dims(s2_tid, dim);
+ *             s3_tid = H5Tget_super(s2_tid);
+ *             // Etc., analyze the base type of the array
+ *         }
+ *         else {
+ *             // analyze a simple type
+ *             printf(" %s: type code %d offset %d size %d\n", H5Tget_member_name(s1_tid, i),
+ *                    H5Tget_class(s2_tid), H5Tget_member_offset(s1_tid, i), H5Tget_size(s2_tid));
+ *         }
+ *         // and so on....
+ *     }
+ * }
+ * \endcode
+ *
+ * \subsection subsec_datatype_life Life Cycle of the Datatype Object
+ * Application programs access HDF5 datatypes through identifiers. Identifiers are obtained by
+ * creating a new datatype or by copying or opening an existing datatype. The identifier can be used
+ * until it is closed or until the library shuts down. See items a and b in the figure below. By default,
+ * a datatype is transient, and it disappears when it is closed.
+ *
+ * When a dataset or attribute is created (#H5Dcreate or #H5Acreate), its datatype is stored in the
+ * HDF5 file as part of the dataset or attribute object. See item c in the figure below. Once an object
+ * is created, its datatype cannot be changed or deleted. The datatype can be accessed by calling
+ * #H5Dget_type, #H5Aget_type, #H5Tget_super, or #H5Tget_member_type. See item d in the figure
+ * below. These calls return an identifier to a transient copy of the datatype of the dataset or
+ * attribute unless the datatype is a committed datatype.
+ *
+ * Note that when an object is created, the stored datatype is a copy of the transient datatype. If two
+ * objects are created with the same datatype, the information is stored in each object with the same
+ * effect as if two different datatypes were created and used.
+ *
+ * A transient datatype can be stored using #H5Tcommit in the HDF5 file as an independent, named
+ * object, called a committed datatype. Committed datatypes were formerly known as named
+ * datatypes. See item e in the figure below.
Subsequently, when a committed datatype is opened + * with #H5Topen (item f), or is obtained with #H5Tget_member_type or similar call (item k), the return + * is an identifier to a transient copy of the stored datatype. The identifier can be used in the + * same way as other datatype identifiers except that the committed datatype cannot be modified. When a + * committed datatype is copied with #H5Tcopy, the return is a new, modifiable, transient datatype + * object (item f). + * + * When an object is created using a committed datatype (#H5Dcreate, #H5Acreate), the stored + * datatype is used without copying it to the object. See item j in the figure below. In this case, if + * multiple objects are created using the same committed datatype, they all share the exact same + * datatype object. This saves space and makes clear that the datatype is shared. Note that a + * committed datatype can be shared by objects within the same HDF5 file, but not by objects in + * other files. For more information on copying committed datatypes to other HDF5 files, see the + * “Copying Committed Datatypes with H5Ocopy” topic in the “Additional Resources” chapter. + * + * A committed datatype can be deleted from the file by calling #H5Ldelete which replaces + * #H5Gunlink. See item i in the figure below. If one or more objects are still using the datatype, the + * committed datatype cannot be accessed with #H5Topen, but will not be removed from the file + * until it is no longer used. #H5Tget_member_type and similar calls will return a transient copy of the + * datatype. + * + * + * + * + * + *
+ * \image html Dtypes_fig23.gif "Life cycle of a datatype" + *
+ *
+ * Transient datatypes are initially modifiable. Note that when a datatype is copied, when it is
+ * written to the file (when an object is created), or when it is used to create a composite datatype,
+ * a copy of the current state of the datatype is used. If the datatype is then modified, the
+ * changes have no effect on datasets, attributes, or datatypes that have already been created. See
+ * the figure below.
+ *
+ * A transient datatype can be made read-only (#H5Tlock). Note that the datatype is still transient,
+ * and otherwise does not change. A datatype that is immutable is read-only but cannot be closed
+ * except when the entire library is closed. The predefined types such as #H5T_NATIVE_INT are
+ * immutable transient types.
+ *
+ * \image html Dtypes_fig24.gif "Transient datatype states: modifiable, read-only, and immutable" + *
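+ *
+ * The brief sketch below (the variable name and the precision value are illustrative) shows the
+ * first two states from the figure above: a modifiable transient copy of a predefined type is made
+ * with #H5Tcopy, adjusted, and then made read-only with #H5Tlock:
+ * \code
+ * hid_t my_int = H5Tcopy(H5T_NATIVE_INT);   // modifiable transient copy
+ * H5Tset_precision(my_int, 16);             // allowed while the datatype is modifiable
+ * H5Tlock(my_int);                          // now read-only; further modification fails
+ * \endcode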
+ *
+ * To create two or more datasets that share a common datatype, first commit the datatype, and then
+ * use that datatype to create the datasets. See the example below.
+ *
+ * Create a shareable datatype
+ * \code
+ * hid_t t1 = ...some transient type...;
+ * H5Tcommit(file, "shared_type", t1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ * hid_t dset1 = H5Dcreate(file, "dset1", t1, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ * hid_t dset2 = H5Dcreate(file, "dset2", t1, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ *
+ * // Later, reopen the first dataset and reuse its (committed) datatype for new datasets
+ * dset1 = H5Dopen(file, "dset1", H5P_DEFAULT);
+ * hid_t t2 = H5Dget_type(dset1);
+ * hid_t dset3 = H5Dcreate(file, "dset3", t2, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ * hid_t dset4 = H5Dcreate(file, "dset4", t2, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ * \endcode
+ *
+ * <table>
+ * <caption>Datatype APIs</caption>
+ * <tr>
+ * <th>Function</th>
+ * <th>Description</th>
+ * </tr>
+ * <tr>
+ * <td>
+ * \code
+ * hid_t H5Topen(hid_t location, const char *name, hid_t tapl_id)
+ * \endcode
+ * </td>
+ * <td>
+ * A committed datatype can be opened by calling this function, which returns a datatype identifier.
+ * The identifier should eventually be released by calling #H5Tclose to release resources. The
+ * committed datatype returned by this function is read-only; a negative value is returned on failure.
+ * The location is either a file or group identifier.
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>
+ * \code
+ * herr_t H5Tcommit(hid_t location, const char *name, hid_t type, hid_t lcpl_id, hid_t tcpl_id, hid_t tapl_id)
+ * \endcode
+ * </td>
+ * <td>
+ * A transient datatype (not immutable) can be written to a file and turned into a committed datatype
+ * by calling this function. The location is either a file or group identifier and, when combined with
+ * name, refers to a new committed datatype.
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>
+ * \code
+ * htri_t H5Tcommitted(hid_t type)
+ * \endcode
+ * </td>
+ * <td>
+ * A type can be queried to determine if it is a committed type or a transient type. If this function
+ * returns a positive value, then the type is committed. Datasets which return committed datatypes with
+ * #H5Dget_type are able to share the datatype with other datasets in the same file.
+ * </td>
+ * </tr>
+ * </table>
+ *
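+ * For example, a minimal sketch (reusing dset1 from the shareable-datatype example above) of
+ * checking whether a dataset's datatype is committed:
+ * \code
+ * hid_t tid = H5Dget_type(dset1);
+ * if (H5Tcommitted(tid) > 0)
+ *     printf("dset1 uses a committed (shared) datatype\n");
+ * H5Tclose(tid);
+ * \endcode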
+ * + * \subsection subsec_datatype_transfer Data Transfer: Datatype Conversion and Selection + * When data is transferred (write or read), the storage layout of the data elements may be different. + * For example, an integer might be stored on disk in big-endian byte order and read into memory + * with little-endian byte order. In this case, each data element will be transformed by the HDF5 + * Library during the data transfer. + * + * The conversion of data elements is controlled by specifying the datatype of the source and + * specifying the intended datatype of the destination. The storage format on disk is the datatype + * specified when the dataset is created. The datatype of memory must be specified in the library + * call. + * + * In order to be convertible, the datatype of the source and destination must have the same + * datatype class (with the exception of enumeration type). Thus, integers can be converted to other + * integers, and floats to other floats, but integers cannot (yet) be converted to floats. For each + * atomic datatype class, the possible conversions are defined. An enumeration datatype can be + * converted to an integer or a floating-point number datatype. + * + * Basically, any datatype can be converted to another datatype of the same datatype class. The + * HDF5 Library automatically converts all properties. If the destination is too small to hold the + * source value then an overflow or underflow exception occurs. If a handler is defined with the + * #H5Pset_type_conv_cb function, it will be called. Otherwise, a default action will be performed. + * The table below summarizes the default actions. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
+ * <table>
+ * <caption>Default actions for datatype conversion exceptions</caption>
+ * <tr>
+ * <th>Datatype Class</th>
+ * <th>Possible Exceptions</th>
+ * <th>Default Action</th>
+ * </tr>
+ * <tr>
+ * <td>Integer</td>
+ * <td>Size, offset, pad</td>
+ * <td></td>
+ * </tr>
+ * <tr>
+ * <td>Float</td>
+ * <td>Size, offset, pad, ebits</td>
+ * <td></td>
+ * </tr>
+ * <tr>
+ * <td>String</td>
+ * <td>Size</td>
+ * <td>Truncates, zero terminate if required.</td>
+ * </tr>
+ * <tr>
+ * <td>Enumeration</td>
+ * <td>No field</td>
+ * <td>All bits set</td>
+ * </tr>
+ * </table>
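+ *
+ * The sketch below (the handler name and the clamping policy are illustrative, not part of this
+ * guide) shows how a conversion exception handler can be registered on a dataset transfer property
+ * list with #H5Pset_type_conv_cb:
+ * \code
+ * static H5T_conv_ret_t
+ * overflow_handler(H5T_conv_except_t except_type, hid_t src_id, hid_t dst_id,
+ *                  void *src_buf, void *dst_buf, void *user_data)
+ * {
+ *     // Clamp values too large for the destination; let the library handle everything else
+ *     if (except_type == H5T_CONV_EXCEPT_RANGE_HI) {
+ *         *(int *)dst_buf = INT_MAX;   // assumes an int destination; INT_MAX from <limits.h>
+ *         return H5T_CONV_HANDLED;
+ *     }
+ *     return H5T_CONV_UNHANDLED;
+ * }
+ *
+ * hid_t xfer_plist_id = H5Pcreate(H5P_DATASET_XFER);
+ * H5Pset_type_conv_cb(xfer_plist_id, overflow_handler, NULL);
+ * \endcode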
+ *
+ * For example, when reading data from a dataset, the source datatype is the datatype set when the
+ * dataset was created, and the destination datatype is the description of the storage layout in
+ * memory. The destination datatype must be specified in the #H5Dread call. The example below
+ * shows reading a dataset of 32-bit integers. The figure below the example shows
+ * the data transformation that is performed.
+ *
+ * Specify the destination datatype with H5Dread
+ * \code
+ * // Stored as H5T_STD_I32BE
+ * // Use the native memory order in the destination
+ * mem_type_id = H5Tcopy(H5T_NATIVE_INT);
+ * status = H5Dread(dataset_id, mem_type_id, mem_space_id, file_space_id, xfer_plist_id, buf);
+ * \endcode
+ *
+ * Layout of a datatype conversion
+ * \image html Dtypes_fig25a.gif
+ * \image html Dtypes_fig25b.gif
+ * \image html Dtypes_fig25c.gif + *
+ *
+ * One thing to note in the example above is the use of the predefined native datatype
+ * #H5T_NATIVE_INT. Recall that in this example, the data was stored as 4-byte integers in big-endian
+ * order. The application wants to read this data into an array of integers in memory. Depending on
+ * the system, the storage layout of memory might be either big-endian or little-endian, so the data
+ * may need to be transformed on some platforms and not on others. The #H5T_NATIVE_INT type is set
+ * by the HDF5 Library to be the correct type to describe the storage layout of the memory on the
+ * system. Thus, the code in the example above will work correctly on any platform, performing a
+ * transformation when needed.
+ *
+ * There are predefined native types for most atomic datatypes, and these can be combined in
+ * composite datatypes. In general, the predefined native datatypes should always be used for data
+ * stored in memory. Predefined native datatypes describe the storage properties of memory.
+ *
+ * \image html Dtypes_fig26.gif "An enum datatype conversion" + *
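+ *
+ * A brief sketch (the member names and values are illustrative) of an enumeration datatype that
+ * can be read back as integers, letting the library perform the enum-to-integer conversion
+ * described earlier:
+ * \code
+ * hid_t enum_tid = H5Tenum_create(H5T_NATIVE_INT);
+ * int val;
+ * val = 0; H5Tenum_insert(enum_tid, "RED",   &val);
+ * val = 1; H5Tenum_insert(enum_tid, "GREEN", &val);
+ * val = 2; H5Tenum_insert(enum_tid, "BLUE",  &val);
+ *
+ * // If the dataset was created with enum_tid, reading with H5T_NATIVE_INT converts the
+ * // stored enumeration values to integers in memory
+ * status = H5Dread(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf);
+ * \endcode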
+ *
+ * Create an aligned and packed compound datatype
+ *
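+ * The snippet below is a representative sketch (the struct and member names are illustrative and
+ * not taken from this guide): an aligned compound datatype is built from a C struct using HOFFSET,
+ * and a packed copy with no interior padding is then produced with #H5Tpack.
+ * \code
+ * typedef struct s6_t {
+ *     int    a;
+ *     char   b;
+ *     double c;
+ * } s6_t;
+ * hid_t aligned_tid, packed_tid;
+ *
+ * // Aligned layout: member offsets follow the C compiler's struct layout
+ * aligned_tid = H5Tcreate(H5T_COMPOUND, sizeof(s6_t));
+ * H5Tinsert(aligned_tid, "a_name", HOFFSET(s6_t, a), H5T_NATIVE_INT);
+ * H5Tinsert(aligned_tid, "b_name", HOFFSET(s6_t, b), H5T_NATIVE_CHAR);
+ * H5Tinsert(aligned_tid, "c_name", HOFFSET(s6_t, c), H5T_NATIVE_DOUBLE);
+ *
+ * // Packed layout: H5Tpack removes all padding between members
+ * packed_tid = H5Tcopy(aligned_tid);
+ * H5Tpack(packed_tid);
+ * \endcode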
+ * \image html Dtypes_fig27.gif "Alignment of a compound datatype" + *
+ *
+ * Transfer some fields of a compound datatype
+ *
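+ * The snippet below is a representative sketch (the struct, member names, and element count are
+ * illustrative): a memory datatype containing only some of the fields of the stored compound
+ * datatype is passed to #H5Dread, so the other fields are skipped during the transfer.
+ * \code
+ * typedef struct partial_t {
+ *     int    a;
+ *     double c;
+ * } partial_t;
+ * hid_t mem_tid;
+ * partial_t *buf = malloc(nelems * sizeof(partial_t));   // nelems: number of dataset elements
+ *
+ * // The memory type names only the fields to be transferred; "b_name" is skipped
+ * mem_tid = H5Tcreate(H5T_COMPOUND, sizeof(partial_t));
+ * H5Tinsert(mem_tid, "a_name", HOFFSET(partial_t, a), H5T_NATIVE_INT);
+ * H5Tinsert(mem_tid, "c_name", HOFFSET(partial_t, c), H5T_NATIVE_DOUBLE);
+ *
+ * status = H5Dread(dataset_id, mem_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);
+ * \endcode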
+ * \image html Dtypes_fig28.gif "Layout when an element is skipped" + *
+ *
+ * \subsection subsec_datatype_text Text Descriptions of Datatypes: Conversion to and from
+ *
+ * HDF5 provides a means for generating a portable and human-readable text description of a
+ * datatype and for generating a datatype from such a text description. This capability is particularly
+ * useful for creating complex datatypes in a single step, for creating a text description of a datatype
+ * for debugging purposes, and for creating a portable datatype definition that can then be used to
+ * recreate the datatype on many platforms or in other applications.
+ *
+ * These tasks are handled by two functions provided in the HDF5 Lite high-level library:
+ * \li #H5LTtext_to_dtype Creates an HDF5 datatype in a single step.
+ * \li #H5LTdtype_to_text Translates an HDF5 datatype into a text description.
+ *
+ * Note that this functionality requires that the HDF5 High-Level Library (H5LT) be installed.
+ *
+ * While #H5LTtext_to_dtype can be used to generate any sort of datatype, it is particularly useful
+ * for complex datatypes.
+ *
+ * #H5LTdtype_to_text is most likely to be used in two sorts of situations: when a datatype must be
+ * closely examined for debugging purposes or to create a portable text description of the datatype
+ * that can then be used to recreate the datatype on other platforms or in other applications.
+ *
+ * These two functions work for all valid HDF5 datatypes except time, bitfield, and reference
+ * datatypes.
+ *
+ * The currently supported text format used by #H5LTtext_to_dtype and #H5LTdtype_to_text is the
+ * data description language (DDL) and conforms to the \ref DDLBNF110. The portion of the
+ * \ref DDLBNF110 that defines HDF5 datatypes appears below.
+ *
+ * The definition of HDF5 datatypes from the HDF5 DDL
+ * \code
+ * ::= | | |
+ *
+ * ::= | |