summaryrefslogtreecommitdiffstats
path: root/doxygen/dox
diff options
context:
space:
mode:
authorAllen Byrne <50328838+byrnHDF@users.noreply.github.com>2022-09-14 20:44:24 (GMT)
committerGitHub <noreply@github.com>2022-09-14 20:44:24 (GMT)
commit45178c87a3099a9fef8bae6f7249ca306cf89629 (patch)
treecb404581365434d641e4d6303921613ef3432bd0 /doxygen/dox
parentdcf3b54b6ef3ffe2093cfae81fe80cdb2bb53047 (diff)
downloadhdf5-45178c87a3099a9fef8bae6f7249ca306cf89629.zip
hdf5-45178c87a3099a9fef8bae6f7249ca306cf89629.tar.gz
hdf5-45178c87a3099a9fef8bae6f7249ca306cf89629.tar.bz2
develop Merge doxygen from 1.12 branch (#2095)
Diffstat (limited to 'doxygen/dox')
-rw-r--r--doxygen/dox/DDLBNF110.dox2
-rw-r--r--doxygen/dox/FileFormatSpec.dox23
-rw-r--r--doxygen/dox/GettingStarted.dox101
-rw-r--r--doxygen/dox/IntroHDF5.dox627
-rw-r--r--doxygen/dox/LearnBasics.dox183
-rw-r--r--doxygen/dox/LearnBasics1.dox1023
-rw-r--r--doxygen/dox/LearnBasics2.dox1159
-rw-r--r--doxygen/dox/LearnBasics3.dox1015
-rw-r--r--doxygen/dox/LearnHDFView.dox472
-rw-r--r--doxygen/dox/ReferenceManual.dox59
-rw-r--r--doxygen/dox/Specifications.dox16
-rw-r--r--doxygen/dox/TechnicalNotes.dox16
-rw-r--r--doxygen/dox/UsersGuide.dox403
-rw-r--r--doxygen/dox/ViewTools.dox1198
-rw-r--r--doxygen/dox/ViewTools2.dox786
-rw-r--r--doxygen/dox/ViewToolsJPSS.dox763
-rw-r--r--doxygen/dox/high_level/extension.dox543
-rw-r--r--doxygen/dox/high_level/high_level.dox29
-rw-r--r--doxygen/dox/rm-template.dox2
19 files changed, 8040 insertions, 380 deletions
diff --git a/doxygen/dox/DDLBNF110.dox b/doxygen/dox/DDLBNF110.dox
index f7e4267..6d6b67e 100644
--- a/doxygen/dox/DDLBNF110.dox
+++ b/doxygen/dox/DDLBNF110.dox
@@ -126,7 +126,7 @@ This section contains a brief explanation of the symbols used in the DDL.
<reference> ::= H5T_REFERENCE { <ref_type> }
-<ref_type> ::= H5T_STD_REF_OBJECT | H5T_STD_REF_DSETREG | H5T_STD_REF | UNDEFINED
+<ref_type> ::= H5T_STD_REF_OBJECT | H5T_STD_REF_DSETREG
<compound_type> ::= H5T_COMPOUND {
<member_type_def>+
diff --git a/doxygen/dox/FileFormatSpec.dox b/doxygen/dox/FileFormatSpec.dox
new file mode 100644
index 0000000..fc10574
--- /dev/null
+++ b/doxygen/dox/FileFormatSpec.dox
@@ -0,0 +1,23 @@
+/** \page FMT3 HDF5 File Format Specification Version 3.0
+
+\htmlinclude H5.format.html
+
+*/
+
+/** \page FMT2 HDF5 File Format Specification Version 2.0
+
+\htmlinclude H5.format.2.0.html
+
+*/
+
+/** \page FMT11 HDF5 File Format Specification Version 1.1
+
+\htmlinclude H5.format.1.1.html
+
+*/
+
+/** \page FMT1 HDF5 File Format Specification Version 1.0
+
+\htmlinclude H5.format.1.0.html
+
+*/ \ No newline at end of file
diff --git a/doxygen/dox/GettingStarted.dox b/doxygen/dox/GettingStarted.dox
index 880491d..29c5033 100644
--- a/doxygen/dox/GettingStarted.dox
+++ b/doxygen/dox/GettingStarted.dox
@@ -1,3 +1,100 @@
-/** \page GettingStarted \Code{Hello, HDF5!}
+/** @page GettingStarted Getting Started with HDF5
- */ \ No newline at end of file
+Navigate back: \ref index "Main"
+<hr>
+
+\section sec_learn Learning HDF5
+There are several resources for learning about HDF5. The HDF Group provides an on-line HDF5 tutorial,
+documentation, examples, and videos. There are also tutorials provided by other organizations that are very useful for learning about HDF5.
+
+\subsection subsec_learn_intro The HDF Group Resources
+For a quick introduction to HDF5 see the following:
+<table>
+<tr>
+<td style="background-color:#F5F5F5">
+@ref IntroHDF5
+</td>
+<td>
+A very brief introduction to HDF5 and the HDF5 programming model and APIs
+</td>
+</tr>
+<tr>
+<td style="background-color:#F5F5F5">
+@ref LearnHDFView
+</td>
+<td>
+A tutorial for learning how to use HDFView. NO programming involved!
+</td>
+</tr>
+<tr>
+<td style="background-color:#F5F5F5">
+@ref LearnBasics
+</td>
+<td>
+Step by step instructions for learning HDF5 that include programming examples
+</td>
+</tr>
+</table>
+
+\subsection subsec_learn_tutor The HDF Group Tutorials and Examples
+These tutorials and examples are available for learning about the HDF5 High Level APIs, tools,
+Parallel HDF5, and the HDF5-1.10 VDS and SWMR new features:
+<table>
+<tr>
+<td style="background-color:#F5F5F5">
+<a href="https://portal.hdfgroup.org/display/HDF5/Introduction+to+the+HDF5+High+Level+APIs">Using the High Level APIs</a>
+</td>
+<td>
+\ref H5LT \ref H5IM \ref H5TB \ref H5PT \ref H5DS
+</td>
+</tr>
+<tr>
+<td style="background-color:#F5F5F5">
+<a href="https://portal.hdfgroup.org/display/HDF5/Introduction+to+Parallel+HDF5">Introduction to Parallel HDF5</a>
+</td>
+<td>
+A brief introduction to Parallel HDF5. If you are new to HDF5 please see the @ref LearnBasics topic first.
+</td>
+</tr>
+<tr>
+<td style="background-color:#F5F5F5">
+\ref ViewTools
+</td>
+<td>
+\li @ref LearnHDFView
+\li @ref ViewToolsCommand
+\li @ref ViewToolsJPSS
+</td>
+</tr>
+<tr>
+<td style="background-color:#F5F5F5">
+HDF5-1.10 New Features
+</td>
+<td>
+\li <a href="https://portal.hdfgroup.org/display/HDF5/Introduction+to+the+Virtual+Dataset++-+VDS">Introduction to the Virtual Dataset - VDS</a>
+\li <a href="https://portal.hdfgroup.org/pages/viewpage.action?pageId=48812567">Introduction to Single-Writer/Multiple-Reader (SWMR)</a>
+</td>
+</tr>
+<tr>
+<td style="background-color:#F5F5F5">
+Example Programs
+</td>
+<td>
+\ref HDF5Examples
+</td>
+</tr>
+<tr>
+<td style="background-color:#F5F5F5">
+Videos
+</td>
+<td>
+\li <a href="https://www.youtube.com/watch?v=BAjsCldRMMc">Introduction to HDF5</a>
+\li <a href="https://www.youtube.com/watch?v=qrI27pI0P1E">Parallel HDF5</a>
+</td>
+</tr>
+</table>
+
+<hr>
+Navigate back: \ref index "Main"
+
+*/
diff --git a/doxygen/dox/IntroHDF5.dox b/doxygen/dox/IntroHDF5.dox
new file mode 100644
index 0000000..ec46217
--- /dev/null
+++ b/doxygen/dox/IntroHDF5.dox
@@ -0,0 +1,627 @@
+/** @page IntroHDF5 Introduction to HDF5
+
+Navigate back: \ref index "Main" / \ref GettingStarted
+<hr>
+
+\section sec_intro_desc HDF5 Description
+HDF5 consists of a file format for storing HDF5 data, a data model for logically organizing and accessing
+HDF5 data from an application, and the software (libraries, language interfaces, and tools) for working with this format.
+
+\subsection subsec_intro_desc_file File Format
+HDF5 consists of a file format for storing HDF5 data, a data model for logically organizing and accessing HDF5 data from an application,
+and the software (libraries, language interfaces, and tools) for working with this format.
+
+\subsection subsec_intro_desc_dm Data Model
+The HDF5 Data Model, also known as the HDF5 Abstract (or Logical) Data Model consists of
+the building blocks for data organization and specification in HDF5.
+
+An HDF5 file (an object in itself) can be thought of as a container (or group) that holds
+a variety of heterogeneous data objects (or datasets). The datasets can be images, tables,
+graphs, and even documents, such as PDF or Excel:
+
+<table>
+<tr>
+<td>
+\image html fileobj.png
+</td>
+</tr>
+</table>
+
+The two primary objects in the HDF5 Data Model are groups and datasets.
+
+There are also a variety of other objects in the HDF5 Data Model that support groups and datasets,
+including datatypes, dataspaces, properties and attributes.
+
+\subsubsection subsec_intro_desc_dm_group Groups
+HDF5 groups (and links) organize data objects. Every HDF5 file contains a root group that can
+contain other groups or be linked to objects in other files.
+
+<table>
+<caption>There are two groups in the HDF5 file depicted above: Viz and SimOut.
+Under the Viz group are a variety of images and a table that is shared with the SimOut group.
+The SimOut group contains a 3-dimensional array, a 2-dimensional array and a link to a 2-dimensional
+array in another HDF5 file.</caption>
+<tr>
+<td>
+\image html group.png
+</td>
+</tr>
+</table>
+
+Working with groups and group members is similar in many ways to working with directories and files
+in UNIX. As with UNIX directories and files, objects in an HDF5 file are often described by giving
+their full (or absolute) path names.
+\li / signifies the root group.
+\li /foo signifies a member of the root group called foo.
+\li /foo/zoo signifies a member of the group foo, which in turn is a member of the root group.
+
+\subsubsection subsec_intro_desc_dm_dset Datasets
+HDF5 datasets organize and contain the “raw” data values. A dataset consists of metadata
+that describes the data, in addition to the data itself:
+
+<table>
+<caption>In this picture, the data is stored as a three dimensional dataset of size 4 x 5 x 6 with an integer datatype.
+It contains attributes, Time and Pressure, and the dataset is chunked and compressed.</caption>
+<tr>
+<td>
+\image html dataset.png
+</td>
+</tr>
+</table>
+
+Datatypes, dataspaces, properties and (optional) attributes are HDF5 objects that describe a dataset.
+The datatype describes the individual data elements.
+
+\subsection subsec_intro_desc_props Datatypes, Dataspaces, Properties and Attributes
+
+\subsubsection subsec_intro_desc_prop_dtype Datatypes
+The datatype describes the individual data elements in a dataset. It provides complete information for
+data conversion to or from that datatype.
+
+<table>
+<caption>In the dataset depicted, each element of the dataset is a 32-bit integer.</caption>
+<tr>
+<td>
+\image html datatype.png
+</td>
+</tr>
+</table>
+
+Datatypes in HDF5 can be grouped into:
+<ul>
+<li>
+<b>Pre-Defined Datatypes</b>: These are datatypes that are created by HDF5. They are actually opened (and closed)
+by HDF5 and can have different values from one HDF5 session to the next. There are two types of pre-defined datatypes:
+<ul>
+<li>
+Standard datatypes are the same on all platforms and are what you see in an HDF5 file. Their names are of the form
+H5T_ARCH_BASE where ARCH is an architecture name and BASE is a programming type name. For example, #H5T_IEEE_F32BE
+indicates a standard Big Endian floating point type.
+</li>
+<li>
+Native datatypes are used to simplify memory operations (reading, writing) and are NOT the same on different platforms.
+For example, #H5T_NATIVE_INT indicates an int (C).
+</li>
+</ul>
+</li>
+<li>
+<b>Derived Datatypes</b>: These are datatypes that are created or derived from the pre-defined datatypes.
+An example of a commonly used derived datatype is a string of more than one character. Compound datatypes
+are also derived types. A compound datatype can be used to create a simple table, and can also be nested,
+in which it includes one or more other compound datatypes.
+<table>
+<caption>This is an example of a dataset with a compound datatype. Each element in the dataset consists
+of a 16-bit integer, a character, a 32-bit integer, and a 2x3x2 array of 32-bit floats (the datatype).
+It is a 2-dimensional 5 x 3 array (the dataspace). The datatype should not be confused with the dataspace.
+</caption>
+<tr>
+<td>
+\image html cmpnddtype.png
+</td>
+</tr>
+</table>
+</li>
+</ul>
+
+\subsubsection subsec_intro_desc_prop_dspace Dataspaces
+A dataspace describes the layout of a dataset’s data elements. It can consist of no elements (NULL),
+a single element (scalar), or a simple array.
+
+<table>
+<caption>This image illustrates a dataspace that is an array with dimensions of 5 x 3 and a rank (number of dimensions) of 2.</caption>
+<tr>
+<td>
+\image html dataspace1.png
+</td>
+</tr>
+</table>
+
+A dataspace can have dimensions that are fixed (unchanging) or unlimited, which means they can grow
+in size (i.e. they are extendible).
+
+There are two roles of a dataspace:
+\li It contains the spatial information (logical layout) of a dataset stored in a file. This includes the rank and dimensions of a dataset, which are a permanent part of the dataset definition.
+\li It describes an application’s data buffers and data elements participating in I/O. In other words, it can be used to select a portion or subset of a dataset.
+
+<table>
+<caption>The dataspace is used to describe both the logical layout of a dataset and a subset of a dataset.</caption>
+<tr>
+<td>
+\image html dataspace.png
+</td>
+</tr>
+</table>
+
+\subsubsection subsec_intro_desc_prop_property Properties
+A property is a characteristic or feature of an HDF5 object. There are default properties which
+handle the most common needs. These default properties can be modified using the HDF5 Property
+List API to take advantage of more powerful or unusual features of HDF5 objects.
+
+<table>
+<tr>
+<td>
+\image html properties.png
+</td>
+</tr>
+</table>
+
+For example, the data storage layout property of a dataset is contiguous by default. For better
+performance, the layout can be modified to be chunked or chunked and compressed:
+
+\subsubsection subsec_intro_desc_prop_attr Attributes
+Attributes can optionally be associated with HDF5 objects. They have two parts: a name and a value.
+Attributes are accessed by opening the object that they are attached to so are not independent objects.
+Typically an attribute is small in size and contains user metadata about the object that it is attached to.
+
+Attributes look similar to HDF5 datasets in that they have a datatype and dataspace. However, they
+do not support partial I/O operations, and they cannot be compressed or extended.
+
+\subsection subsec_intro_desc_soft HDF5 Software
+The HDF5 software is written in C and includes optional wrappers for C++, FORTRAN (90 and F2003),
+and Java. The HDF5 binary distribution consists of the HDF5 libraries, include files, command-line
+utilities, scripts for compiling applications, and example programs.
+
+\subsubsection subsec_intro_desc_soft_apis HDF5 APIs and Libraries
+There are APIs for each type of object in HDF5. For example, all C routines in the HDF5 library
+begin with a prefix of the form H5*, where * is one or two uppercase letters indicating the type
+of object on which the function operates:
+\li @ref H5A <b>A</b>ttribute Interface
+\li @ref H5D <b>D</b>ataset Interface
+\li @ref H5F <b>F</b>ile Interface
+
+The HDF5 High Level APIs simplify many of the steps required to create and access objects, as well
+as providing templates for storing objects. Following is a list of the High Level APIs:
+\li @ref H5LT – simplifies steps in creating datasets and attributes
+\li @ref H5IM – defines a standard for storing images in HDF5
+\li @ref H5TB – condenses the steps required to create tables
+\li @ref H5DS – provides a standard for dimension scale storage
+\li @ref H5PT – provides a standard for storing packet data
+
+\subsubsection subsec_intro_desc_soft_tools Tools
+Useful tools for working with HDF5 files include:
+\li h5dump: A utility to dump or display the contents of an HDF5 File
+\li h5cc, h5c++, h5fc: Unix scripts for compiling applications
+\li HDFView: A java browser to view HDF (HDF4 and HDF5) files
+
+<h4>h5dump</h4>
+The h5dump utility displays the contents of an HDF5 file in Data Description Language (\ref DDLBNF110).
+Below is an example of h5dump output for an HDF5 file that contains no objects:
+\code
+$ h5dump file.h5
+ HDF5 "file.h5" {
+ GROUP "/" {
+ }
+ }
+\endcode
+
+With large files and datasets the output from h5dump can be overwhelming.
+There are options that can be used to examine specific parts of an HDF5 file.
+Some useful h5dump options are included below:
+\code
+ -H, --header Display header information only (no data)
+ -d <name> Display a dataset with a specified path and name
+ -p Display properties
+ -n Display the contents of the file
+\endcode
+
+<h4>h5cc, h5fc, h5c++</h4>
+The built HDF5 binaries include the h5cc, h5fc, h5c++ compile scripts for compiling applications.
+When using these scripts there is no need to specify the HDF5 libraries and include files.
+Compiler options can be passed to the scripts.
+
+<h4>HDFView</h4>
+The HDFView tool allows browsing of data in HDF (HDF4 and HDF5) files.
+
+\section sec_intro_pm Introduction to the HDF5 Programming Model and APIs
+The HDF5 Application Programming Interface is extensive, but a few functions do most of the work.
+
+To introduce the programming model, examples in Python and C are included below. The Python examples
+use the HDF5 Python APIs (h5py). See the Examples from "Learning the Basics" page for complete examples
+that can be downloaded and run for C, FORTRAN, C++, Java and Python.
+
+The general paradigm for working with objects in HDF5 is to:
+\li Open the object.
+\li Access the object.
+\li Close the object.
+
+The library imposes an order on the operations by argument dependencies. For example, a file must be
+opened before a dataset because the dataset open call requires a file handle as an argument. Objects
+can be closed in any order. However, once an object is closed it no longer can be accessed.
+
+Keep the following in mind when looking at the example programs included in this section:
+<ul>
+<li>
+<ul>
+<li>
+C routines begin with the prefix “H5*” where * is a single letter indicating the object on which the
+operation is to be performed.
+</li>
+<li>
+FORTRAN routines are similar; they begin with “h5*” and end with “_f”.
+</li>
+<li>
+Java routines are similar; the routine names begin with “H5*” and are prefixed with “H5.” as the class. Constants are
+in the HDF5Constants class and are prefixed with "HDF5Constants.". The function arguments
+are usually similar, @see @ref HDF5LIB
+</li>
+</ul>
+For example:
+<ul>
+<li>
+File Interface:<ul><li>#H5Fopen (C)</li><li>h5fopen_f (FORTRAN)</li><li>H5.H5Fopen (Java)</li></ul>
+</li>
+<li>
+Dataset Interface:<ul><li>#H5Dopen (C)</li><li>h5dopen_f (FORTRAN)</li><li>H5.H5Dopen (Java)</li></ul>
+</li>
+<li>
+Dataspace interface:<ul><li>#H5Sclose (C)</li><li>h5sclose_f (FORTRAN)</li><li>H5.H5Sclose (Java)</li></ul>
+</li>
+</ul>
+The HDF5 Python APIs use methods associated with specific objects.
+</li>
+<li>
+For portability, the HDF5 library has its own defined types. Some common types that you will see
+in the example code are:
+<ul>
+<li>
+#hid_t is used for object handles
+</li>
+<li>
+hsize_t is used for dimensions
+</li>
+<li>
+#herr_t is used for many return values
+</li>
+</ul>
+</li>
+<li>
+Language specific files must be included in applications:
+<ul>
+<li>
+Python: Add <code>"import h5py / import numpy"</code>
+</li>
+<li>
+C: Add <code>"#include hdf5.h"</code>
+</li>
+<li>
+FORTRAN: Add <code>"USE HDF5"</code> and call h5open_f and h5close_f to initialize and close the HDF5 FORTRAN interface
+</li>
+<li>
+Java: Add <code>"import hdf.hdf5lib.H5;
+ import hdf.hdf5lib.HDF5Constants;"</code>
+</li>
+</ul>
+</li>
+</ul>
+
+\subsection subsec_intro_pm_file Steps to create a file
+To create an HDF5 file you must:
+\li Specify property lists (or use the defaults).
+\li Create the file.
+\li Close the file (and property lists if needed).
+
+Example:
+<table>
+<caption>The following Python and C examples create a file, file.h5, and then close it.
+The resulting HDF5 file will only contain a root group:</caption>
+<tr>
+<td>
+\image html crtf-pic.png
+</td>
+</tr>
+</table>
+
+Calling h5py.File with ‘w’ for the file access flag will create a new HDF5 file and overwrite
+an existing file with the same name. “file” is the file handle returned from opening the file.
+When finished with the file, it must be closed. When not specifying property lists, the default
+property lists are used:
+
+<table>
+<tr>
+<td>
+<em>Python</em>
+\code
+ import h5py
+ file = h5py.File ('file.h5', 'w')
+ file.close ()
+\endcode
+</td>
+</tr>
+</table>
+
+The H5Fcreate function creates an HDF5 file. #H5F_ACC_TRUNC is the file access flag to create a new
+file and overwrite an existing file with the same name, and #H5P_DEFAULT is the value specified to
+use a default property list.
+
+<table>
+<tr>
+<td>
+<em>C</em>
+\code
+ #include "hdf5.h"
+
+ int main() {
+ hid_t file_id;
+ herr_t status;
+
+ file_id = H5Fcreate ("file.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ status = H5Fclose (file_id);
+ }
+\endcode
+</td>
+</tr>
+</table>
+
+\subsection subsec_intro_pm_dataset Steps to create a dataset
+As described previously, an HDF5 dataset consists of the raw data, as well as the metadata that
+describes the data (datatype, spatial information, and properties). To create a dataset you must:
+\li Define the dataset characteristics (datatype, dataspace, properties).
+\li Decide which group to attach the dataset to.
+\li Create the dataset.
+\li Close the dataset handle from step 3.
+
+Example:
+<table>
+<caption>The code excerpts below show the calls that need to be made to create a 4 x 6 integer dataset dset
+in a file dset.h5. The dataset will be located in the root group:</caption>
+<tr>
+<td>
+\image html crtdset.png
+</td>
+</tr>
+</table>
+
+With Python, the creation of the dataspace is included as a parameter in the dataset creation method.
+Just one call will create a 4 x 6 integer dataset dset. A pre-defined Big Endian 32-bit integer datatype
+is specified. The create_dataset method creates the dataset in the root group (the file object).
+The dataset is closed by the Python interface.
+
+<table>
+<tr>
+<td>
+<em>Python</em>
+\code
+ dataset = file.create_dataset("dset",(4, 6), h5py.h5t.STD_I32BE)
+\endcode
+</td>
+</tr>
+</table>
+
+To create the same dataset in C, you must specify the dataspace with the #H5Screate_simple function,
+create the dataset by calling #H5Dcreate, and then close the dataspace and dataset with calls to #H5Dclose
+and #H5Sclose. #H5P_DEFAULT is specified to use a default property list. Note that the file identifier
+(file_id) is passed in as the first parameter to #H5Dcreate, which creates the dataset in the root group.
+
+<table>
+<tr>
+<td>
+<em>C</em>
+\code
+ // Create the dataspace for the dataset.
+ dims[0] = 4;
+ dims[1] = 6;
+
+ dataspace_id = H5Screate_simple(2, dims, NULL);
+
+ // Create the dataset.
+ dataset_id = H5Dcreate (file_id, "/dset", H5T_STD_I32BE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+
+ // Close the dataset and dataspace
+ status = H5Dclose(dataset_id);
+ status = H5Sclose(dataspace_id);
+\endcode
+</td>
+</tr>
+</table>
+
+\subsection subsec_intro_pm_write Writing to or reading from a dataset
+Once you have created or opened a dataset you can write to it:
+
+<table>
+<tr>
+<td>
+<em>Python</em>
+\code
+ data = np.zeros((4,6))
+ for i in range(4):
+ for j in range(6):
+ data[i][j]= i*6+j+1
+
+ dataset[...] = data <-- Write data to dataset
+ data_read = dataset[...] <-- Read data from dataset
+\endcode
+</td>
+</tr>
+</table>
+
+#H5S_ALL is passed in for the memory and file dataspace parameters to indicate that the entire dataspace
+of the dataset is specified. These two parameters can be modified to allow subsetting of a dataset.
+The native predefined datatype, #H5T_NATIVE_INT, is used for reading and writing so that HDF5 will do
+any necessary integer conversions:
+
+<table>
+<tr>
+<td>
+<em>C</em>
+\code
+ status = H5Dwrite (dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset_data);
+ status = H5Dread (dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset_data);
+\endcode
+</td>
+</tr>
+</table>
+
+\subsection subsec_intro_pm_group Steps to create a group
+An HDF5 group is a structure containing zero or more HDF5 objects. Before you can create a group you must
+obtain the location identifier of where the group is to be created. Following are the steps that are required:
+\li Decide where to put the group – in the “root group” (or file identifier) or in another group. Open the group if it is not already open.
+\li Define properties or use the default.
+\li Create the group.
+\li Close the group.
+
+<table>
+<caption>Creates attributes that are attached to the dataset dset</caption>
+<tr>
+<td>
+\image html crtgrp.png
+</td>
+</tr>
+</table>
+
+The code below opens the dataset dset.h5 with read/write permission and creates a group MyGroup in the root group.
+Properties are not specified so the defaults are used:
+
+<table>
+<tr>
+<td>
+<em>Python</em>
+\code
+ import h5py
+ file = h5py.File('dset.h5', 'r+')
+ group = file.create_group ('MyGroup')
+ file.close()
+\endcode
+</td>
+</tr>
+</table>
+
+To create the group MyGroup in the root group, you must call #H5Gcreate, passing in the file identifier returned
+from opening or creating the file. The default property lists are specified with #H5P_DEFAULT. The group is then
+closed:
+
+<table>
+<tr>
+<td>
+<em>C</em>
+\code
+ group_id = H5Gcreate (file_id, "MyGroup", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ status = H5Gclose (group_id);
+\endcode
+</td>
+</tr>
+</table>
+
+\subsection subsec_intro_pm_attr Steps to create and write to an attribute
+To create an attribute you must open the object that you wish to attach the attribute to. Then you can create,
+access, and close the attribute as needed:
+\li Open the object that you wish to add an attribute to.
+\li Create the attribute
+\li Write to the attribute
+\li Close the attribute and the object it is attached to.
+
+<table>
+<caption>Creates attributes that are attached to the dataset dset</caption>
+<tr>
+<td>
+\image html crtatt.png
+</td>
+</tr>
+</table>
+
+The dataspace, datatype, and data are specified in the call to create an attribute in Python:
+
+<table>
+<tr>
+<td>
+<em>Python</em>
+\code
+ dataset.attrs["Units"] = "Meters per second" <-- Create string
+ attr_data = np.zeros((2,))
+ attr_data[0] = 100
+ attr_data[1] = 200
+ dataset.attrs.create("Speed", attr_data, (2,), "i") <-- Create Integer
+\endcode
+</td>
+</tr>
+</table>
+
+To create an integer attribute in C, you must create the dataspace, create the attribute, write
+to it and then close it in separate steps:
+
+<table>
+<tr>
+<td>
+<em>C</em>
+\code
+ hid_t attribute_id, dataspace_id; // identifiers
+ hsize_t dims;
+ int attr_data[2];
+ herr_t status;
+
+ ...
+
+ // Initialize the attribute data.
+ attr_data[0] = 100;
+ attr_data[1] = 200;
+
+ // Create the data space for the attribute.
+ dims = 2;
+ dataspace_id = H5Screate_simple(1, &dims, NULL);
+
+ // Create a dataset attribute.
+ attribute_id = H5Acreate2 (dataset_id, "Units", H5T_STD_I32BE,
+ dataspace_id, H5P_DEFAULT, H5P_DEFAULT);
+
+ // Write the attribute data.
+ status = H5Awrite(attribute_id, H5T_NATIVE_INT, attr_data);
+
+ // Close the attribute.
+ status = H5Aclose(attribute_id);
+
+ // Close the dataspace.
+ status = H5Sclose(dataspace_id);
+\endcode
+</td>
+</tr>
+</table>
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted
+
+
+@page HDF5Examples HDF5 Examples
+Example programs of how to use HDF5 are provided below.
+For HDF-EOS specific examples, see the <a href="http://hdfeos.org/zoo/index.php">examples</a>
+of how to access and visualize NASA HDF-EOS files using IDL, MATLAB, and NCL on the
+<a href="http://hdfeos.org/">HDF-EOS Tools and Information Center</a> page.
+
+\section secHDF5Examples Examples
+\li \ref LBExamples
+\li <a href="https://portal.hdfgroup.org/display/HDF5/Examples+by+API">Examples by API</a>
+\li <a href="https://portal.hdfgroup.org/display/HDF5/Examples+in+the+Source+Code">Examples in the Source Code</a>
+\li <a href="https://portal.hdfgroup.org/display/HDF5/Other+Examples">Other Examples</a>
+
+\section secHDF5ExamplesCompile How To Compile
+For information on compiling in C, C++ and Fortran, see: \ref LBCompiling
+
+\section secHDF5ExamplesOther Other Examples
+<a href="http://hdfeos.org/zoo/index.php">IDL, MATLAB, and NCL Examples for HDF-EOS</a>
+Examples of how to access and visualize NASA HDF-EOS files using IDL, MATLAB, and NCL.
+
+<a href="https://support.hdfgroup.org/ftp/HDF5/examples/misc-examples/">Miscellaneous Examples</a>
+These (very old) examples resulted from working with users, and are not fully tested. Most of them are in C, with a few in Fortran and Java.
+
+<a href="https://support.hdfgroup.org/ftp/HDF5/examples/special_values_HDF5_example.tar">Using Special Values</a>
+These examples show how to create special values in an HDF5 application.
+
+*/
diff --git a/doxygen/dox/LearnBasics.dox b/doxygen/dox/LearnBasics.dox
new file mode 100644
index 0000000..298672d
--- /dev/null
+++ b/doxygen/dox/LearnBasics.dox
@@ -0,0 +1,183 @@
+/** @page LearnBasics Learning the Basics
+
+Navigate back: \ref index "Main" / \ref GettingStarted
+<hr>
+
+\section secIntro Introduction
+The following topics cover the basic features in HDF5. The topics build on each other and are
+intended to be completed in order. Some sections use files created in earlier sections. The
+examples used can also be found on the \ref LBExamples
+page and in the HDF5 source code (C, C++, Fortran).
+
+\section Topics Topics
+\li @subpage LBFileOrg
+\li @subpage LBAPI
+\li @subpage LBProg
+\li @subpage LBFileCreate
+\li @subpage LBDsetCreate
+\li @subpage LBDsetRW
+\li @subpage LBAttrCreate
+\li @subpage LBGrpCreate
+\li @subpage LBGrpCreateNames
+\li @subpage LBGrpDset
+\li @subpage LBDsetSubRW
+\li @subpage LBDatatypes
+\li @subpage LBPropsList
+\li @subpage LBDsetLayout
+\li @subpage LBExtDset
+\li @subpage LBComDset
+\li @subpage LBContents
+\li @subpage LBQuiz
+\li @subpage LBQuizAnswers
+\li @subpage LBCompiling
+\li @subpage LBTraining
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted
+
+
+@page LBExamples Examples from Learning the Basics
+
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\section secLBExamples
+These examples are used in the \ref LearnBasics topic. See \ref LBCompiling for details on compiling them.
+PLEASE NOTE that the example programs are listed in the order they are expected to be run. Some example
+programs use files created in earlier examples.
+
+\section secLBExamplesSrc HDF5 Source Code Examples
+These examples (C, C++, Fortran) are provided in the HDF5 source code and (Unix) binaries.
+<table>
+<tr>
+<th>Feature
+</th>
+<th>Examples
+</th>
+<th>Comments
+</th>
+<tr>
+<td>Create a file
+</td>
+<td>C Fortran C++ <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/java/examples/intro/H5_CreateFile.java">Java</a> Python
+</td>
+<td>
+</td>
+</tr>
+<tr>
+<td>Create a dataset
+</td>
+<td><a href="https://raw.githubusercontent.com//HDFGroup/hdf5/hdf5_1_10/examples/h5_crtdat.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_crtdat.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_crtdat.cpp">C++</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/java/examples/intro/H5_CreateDataset.java">Java</a> <a href="https://support.hdfgroup.org/ftp/HDF5/examples/Py/h5_crtdat.py">Python</a>
+</td>
+<td>
+</td>
+</tr>
+<tr>
+<td>Read and write to a dataset
+</td>
+<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_rdwt.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_rdwt.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_rdwt.cpp">C++</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/java/examples/intro/H5_ReadWrite.java">Java</a> <a href="https://support.hdfgroup.org/ftp/HDF5/examples/Py/h5_rdwt.py">Python</a>
+</td>
+<td>
+</td>
+</tr>
+<tr>
+<td>Create an attribute
+</td>
+<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_crtatt.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_crtatt.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_crtatt.cpp">C++</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/java/examples/intro/H5_CreateAttribute.java">Java</a> <a href="https://support.hdfgroup.org/ftp/HDF5/examples/Py/h5_crtatt.py">Python</a>
+</td>
+<td>
+</td>
+</tr>
+<tr>
+<td>Create a group
+</td>
+<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_crtgrp.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_crtgrp.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_crtgrp.cpp">C++</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/java/examples/intro/H5_CreateGroup.java">Java</a> <a href="https://support.hdfgroup.org/ftp/HDF5/examples/Py/h5_crtgrp.py">Python</a>
+</td>
+<td>
+</td>
+</tr>
+<tr>
+<td>Create groups in a file using absolute and relative paths
+</td>
+<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_crtgrpar.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_crtgrpar.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_crtgrpar.cpp">C++</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/java/examples/intro/H5_CreateGroupAbsoluteRelative.java">Java</a> <a href="https://support.hdfgroup.org/ftp/HDF5/examples/Py/h5_crtgrpar.py">Python</a>
+</td>
+<td>
+</td>
+</tr>
+<tr>
+<td>Create datasets in a group
+</td>
+<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_crtgrpd.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_crtgrpd.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_crtgrpd.cpp">C++</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/java/examples/intro/H5_CreateGroupDataset.java">Java</a> <a href="https://support.hdfgroup.org/ftp/HDF5/examples/Py/h5_crtgrpd.py">Python</a>
+</td>
+<td>
+</td>
+</tr>
+<tr>
+<td>Create a file and dataset and select/read a subset from the dataset
+</td>
+<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_subset.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_subset.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_subset.cpp">C++</a> Java Python
+</td>
+<td>Also see examples to Write by row (and column) below.
+</td>
+</tr>
+<tr>
+<td>Create an extendible (unlimited dimension) dataset
+</td>
+<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_extend.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_extend.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_extend.cpp">C++</a> Java Python
+</td>
+<td>Also see examples to Extend by row (and column) below
+</td>
+</tr>
+<tr>
+<td>Create a chunked and compressed dataset
+</td>
+<td><a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/examples/h5_cmprss.c">C</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/fortran/examples/h5_cmprss.f90">Fortran</a> <a href="https://raw.githubusercontent.com/HDFGroup/hdf5/hdf5_1_10/c++/examples/h5tutr_cmprss.cpp">C++</a> Java <a href="https://support.hdfgroup.org/ftp/HDF5/examples/Py/h5_cmprss.py">Python</a>
+</td>
+<td>
+</td>
+</tr>
+</table>
+
+*See <a href="https://github.com/scotmartin1234/HDF5Mathematica">HDF5Mathematica</a> for user-contributed
+HDF5 Mathematica Wrappers and Introductory Tutorial Examples. The examples use P/Invoke.
+
+\section secLBExamplesAddl Additional Examples
+These examples make minor changes to the tutorial examples.
+<table>
+<tr>
+<th>Feature
+</th>
+<th>Examples
+</th>
+</tr>
+<tr>
+<td>Write by row
+</td>
+<td><a href="">C</a> <a href="">Fortran</a>
+</td>
+</tr>
+<tr>
+<td>Write by column
+</td>
+<td><a href="">C</a> <a href="">Fortran</a>
+</td>
+</tr>
+<tr>
+<td>Extend by row
+</td>
+<td><a href="">C</a> <a href="">Fortran</a>
+</td>
+</tr>
+<tr>
+<td>Extend by column
+</td>
+<td><a href="">C</a> <a href="">Fortran</a>
+</td>
+</tr>
+</table>
+
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+*/
diff --git a/doxygen/dox/LearnBasics1.dox b/doxygen/dox/LearnBasics1.dox
new file mode 100644
index 0000000..a9b6d0e
--- /dev/null
+++ b/doxygen/dox/LearnBasics1.dox
@@ -0,0 +1,1023 @@
+/** @page LBFileOrg HDF5 File Organization
+
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\section secLBFileOrg HDF5 file
+An HDF5 file is a container for storing a variety of scientific data and is composed of two primary types of objects: groups and datasets.
+
+\li HDF5 group: a grouping structure containing zero or more HDF5 objects, together with supporting metadata
+\li HDF5 dataset: a multidimensional array of data elements, together with supporting metadata
+
+Any HDF5 group or dataset may have an associated attribute list. An HDF5 attribute is a user-defined HDF5 structure
+that provides extra information about an HDF5 object.
+
+Working with groups and datasets is similar in many ways to working with directories and files in UNIX. As with UNIX
+directories and files, an HDF5 object in an HDF5 file is often referred to by its full path name (also called an absolute path name).
+
+\li <code style="background-color:whitesmoke;">/</code> signifies the root group.
+
+\li <code style="background-color:whitesmoke;">/foo</code> signifies a member of the root group called foo.
+
+\li <code style="background-color:whitesmoke;">/foo/zoo</code> signifies a member of the group foo, which in turn is a member of the root group.
+
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBAPI The HDF5 API
+
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\section secLBAPI HDF5 C API
+The HDF5 library provides several interfaces, or APIs. These APIs provide routines for creating,
+accessing, and manipulating HDF5 files and objects.
+
+The library itself is implemented in C. To facilitate the work of FORTRAN 90, C++ and Java programmers,
+HDF5 function wrappers have been developed in each of these languages. This tutorial discusses the use
+of the C functions and the FORTRAN wrappers.
+
+All C routines in the HDF5 library begin with a prefix of the form H5*, where * is one or two uppercase
+letters indicating the type of object on which the function operates.
+The FORTRAN wrappers come in the form of subroutines that begin with h5 and end with _f.
+Java routine names begin with “H5*” and are prefixed with “H5.” as the class. Constants are
+in the HDF5Constants class and are prefixed with "HDF5Constants.".
+The APIs are listed below:
+<table>
+<tr>
+<th><strong>API</strong>
+</th>
+<th><strong>DESCRIPTION</strong>
+</th>
+</tr>
+<tr>
+<th><strong>H5</strong>
+</th>
+<td>Library Functions: general-purpose H5 functions
+</td>
+</tr>
+<tr>
+<th><strong>H5A</strong>
+</th>
+<td>Annotation Interface: attribute access and manipulation routines
+</td>
+</tr>
+<tr>
+<th><strong>H5D</strong>
+</th>
+<td>Dataset Interface: dataset access and manipulation routines
+</td>
+</tr>
+<tr>
+<th><strong>H5E</strong>
+</th>
+<td>Error Interface: error handling routines
+</td>
+</tr>
+<tr>
+<th><strong>H5F</strong>
+</th>
+<td>File Interface: file access routines
+</td>
+</tr>
+<tr>
+<th><strong>H5G</strong>
+</th>
+<td>Group Interface: group creation and operation routines
+</td>
+</tr>
+<tr>
+<th><strong>H5I</strong>
+</th>
+<td>Identifier Interface: identifier routines
+</td>
+</tr>
+<tr>
+<th><strong>H5L</strong>
+</th>
+<td>Link Interface: link routines
+</td>
+</tr>
+<tr>
+<th><strong>H5O</strong>
+</th>
+<td>Object Interface: object routines
+</td>
+</tr>
+<tr>
+<th><strong>H5P</strong>
+</th>
+<td>Property List Interface: object property list manipulation routines
+</td>
+</tr>
+<tr>
+<th><strong>H5R</strong>
+</th>
+<td>Reference Interface: reference routines
+</td>
+</tr>
+<tr>
+<th><strong>H5S</strong>
+</th>
+<td>Dataspace Interface: dataspace definition and access routines
+</td>
+</tr>
+<tr>
+<th><strong>H5T</strong>
+</th>
+<td>Datatype Interface: datatype creation and manipulation routines
+</td>
+</tr>
+<tr>
+<th><strong>H5Z</strong>
+</th>
+<td>Compression Interface: compression routine(s)
+</td>
+</tr>
+</table>
+
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBProg Programming Issues
+
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+Keep the following in mind when looking at the example programs included in this tutorial:
+
+\section LBProgAPI APIs vary with different languages
+\li C routines begin with the prefix “H5*” where * is one or two uppercase letters indicating the object on which the operation is to be performed:
+<table>
+<tr>
+<td>File Interface: </td>
+<td>#H5Fopen</td>
+</tr><tr>
+<td>Dataset Interface:</td>
+<td>#H5Dopen</td>
+</tr>
+</table>
+
+\li FORTRAN routines begin with “h5*” and end with “_f”:
+<table>
+<tr>
+<td>File Interface: </td>
+<td>h5fopen_f</td>
+</tr><tr>
+<td>Dataset Interface:</td>
+<td>h5dopen_f</td>
+</tr>
+</table>
+
+\li Java routine names begin with “H5*” and are prefixed with “H5.” as the class. Constants are
+in the HDF5Constants class and are prefixed with "HDF5Constants.":
+<table>
+<tr>
+<td>File Interface: </td>
+<td>H5.H5Fopen</td>
+</tr><tr>
+<td>Dataset Interface:</td>
+<td>H5.H5Dopen</td>
+</tr>
+</table>
+
+\li APIs for languages like C++, Java, and Python use methods associated with specific objects.
+
+\section LBProgTypes HDF5 library has its own defined types
+\li #hid_t is used for object handles
+\li hsize_t is used for dimensions
+\li #herr_t is used for many return values
+
+\section LBProgLang Language specific files must be included in applications
+<ul>
+<li>
+Python: Add <code>"import h5py / import numpy"</code>
+</li>
+<li>
+C: Add <code>#include "hdf5.h"</code>
+</li>
+<li>
+FORTRAN: Add <code>"USE HDF5"</code> and call h5open_f and h5close_f to initialize and close the HDF5 FORTRAN interface
+</li>
+<li>
+Java: Add <code>"import hdf.hdf5lib.H5;
+ import hdf.hdf5lib.HDF5Constants;"</code>
+</li>
+</ul>
+
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBFileCreate Creating an HDF5 File
+
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+An HDF5 file is a binary file containing scientific data and supporting metadata.
+\section secLBFileCreate HDF5 File Access
+To create an HDF5 file, an application must specify not only a file name, but a file access mode,
+a file creation property list, and a file access property list. These terms are described below:
+<ul>
+<li><strong>File access mode:</strong><br />
+When creating a file, the file access mode specifies the action to take if the file already exists:
+<ul>
+<li>#H5F_ACC_TRUNC specifies that if the file already exists, the current contents will be deleted so
+that the application can rewrite the file with new data.
+</li>
+<li>#H5F_ACC_EXCL specifies that the open will fail if the file already exists. If the file does not
+already exist, the file access parameter is ignored.
+</li>
+</ul>
+In either case, the application has both read and write access to the successfully created file.
+<br />
+Note that there are two different access modes for opening existing files:
+<ul>
+<li>#H5F_ACC_RDONLY specifies that the application has read access but will not be allowed to write any data.
+</li>
+<li>#H5F_ACC_RDWR specifies that the application has read and write access.
+</li>
+</ul>
+</li>
+<li><strong>File creation property list:</strong><br />The file creation property list is used to
+control the file metadata. File metadata contains information about the size of the user-block*,
+the size of various file data structures used by the HDF5 library, etc. In this tutorial, the
+default file creation property list, #H5P_DEFAULT, is used.<br />
+ *The user-block is a fixed-length block of data located at the beginning of the file which is
+ignored by the HDF5 library. The user-block may be used to store any data or information found
+to be useful to applications.
+</li>
+<li><strong>File access property list:</strong><br />The file access property list is used to
+control different methods of performing I/O on files. It also can be used to control how a file
+is closed (whether or not to delay the actual file close until all objects in a file are closed).
+The default file access property list, #H5P_DEFAULT, is used in this tutorial.
+</li>
+</ul>
+
+Please refer to the \ref sec_file section of the \ref UG and \ref H5F section in the \ref RM for
+detailed information regarding file access/creation property lists and access modes.
+
+The steps to create and close an HDF5 file are as follows:
+<ol>
+<li>Specify the file creation and access property lists, if necessary.</li>
+<li>Create the file.</li>
+<li>Close the file, and if necessary, close the property lists.</li>
+</ol>
+
+\section secLBFileExample Programming Example
+
+\subsection subsecLBFileExampleDesc Description
+The following example code demonstrates how to create and close an HDF5 file.
+
+<em>C</em>
+\code
+#include "hdf5.h"
+ #define FILE "file.h5"
+
+ int main() {
+
+ hid_t file_id; /* file identifier */
+ herr_t status;
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Terminate access to the file. */
+ status = H5Fclose(file_id);
+ }
+\endcode
+
+<em>Fortran</em>
+\code
+ PROGRAM FILEEXAMPLE
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+
+ CHARACTER(LEN=8), PARAMETER :: filename = "filef.h5" ! File name
+ INTEGER(HID_T) :: file_id ! File identifier
+
+ INTEGER :: error ! Error flag
+
+!
+! Initialize FORTRAN interface.
+!
+ CALL h5open_f (error)
+ !
+ ! Create a new file using default properties.
+ !
+ CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error)
+
+ !
+ ! Terminate access to the file.
+ !
+ CALL h5fclose_f(file_id, error)
+!
+! Close FORTRAN interface.
+!
+ CALL h5close_f(error)
+ END PROGRAM FILEEXAMPLE
+\endcode
+
+See \ref LBExamples for the examples used in the Learning the Basics tutorial.
+
+For details on compiling an HDF5 application:
+[ \ref LBCompiling ]
+
+\subsection subsecLBFileExampleRem Remarks
+\li <strong>In C</strong>: The include file <code style="background-color:whitesmoke;">hdf5.h</code> contains definitions and declarations and must be included
+in any program that uses the HDF5 library.
+<br />
+<strong>In FORTRAN</strong>: The module <code style="background-color:whitesmoke;">HDF5</code> contains definitions and declarations and must be used in any
+program that uses the HDF5 library. Also note that #H5open MUST be called at the beginning of an HDF5 Fortran
+application (prior to any HDF5 calls) to initialize the library and variables. The #H5close call MUST be at
+the end of the HDF5 Fortran application.
+\li #H5Fcreate creates an HDF5 file and returns the file identifier.<br />
+For Fortran, the file creation property list and file access property list are optional. They can be omitted if the
+default values are to be used.<br />
+The root group is automatically created when a file is created. Every file has a root group and the path name of
+the root group is always <code style="background-color:whitesmoke;">/</code>.
+\li #H5Fclose terminates access to an HDF5 file.<br />
+When an HDF5 file is no longer accessed by a program, #H5Fclose must be called to release the resources used by the file.
+This call is mandatory.<br />
+Note that if #H5Fclose is called for a file, but one or more objects within the file remain open, those objects will
+remain accessible until they are individually closed. This can cause access problems for other users, if objects were
+inadvertently left open. A File Access property controls how the file is closed.
+
+\subsection subsecLBFileExampleCont File Contents
+The HDF Group has developed tools for examining the contents of HDF5 files. The tool used throughout the HDF5 tutorial
+is the HDF5 dumper, <code style="background-color:whitesmoke;">h5dump</code>, which displays the file contents in human-readable form. The output of <code style="background-color:whitesmoke;">h5dump</code> is an ASCII
+display formatted according to the HDF5 DDL grammar. This grammar is defined, using Backus-Naur Form, in the
+\ref DDLBNF110.
+
+To view the HDF5 file contents, simply type:
+\code
+h5dump <filename>
+\endcode
+
+<table>
+<caption>Describe the file contents of file.h5 using a directed graph.</caption>
+<tr>
+<td>
+\image html imgLBFile.gif
+</td>
+</tr>
+</table>
+
+The text description of <code style="background-color:whitesmoke;">file.h5</code>, as generated by <code style="background-color:whitesmoke;">h5dump</code>. The HDF5 file called <code style="background-color:whitesmoke;">file.h5</code>
+contains a group called <code style="background-color:whitesmoke;">/</code>, or the root group. (The file called <code style="background-color:whitesmoke;">filef.h5</code>, created by the FORTRAN version of the example,
+has the same output except that the filename shown is <code style="background-color:whitesmoke;">filef.h5</code>.)
+\code
+HDF5 "file.h5" {
+ GROUP "/" {
+ }
+ }
+\endcode
+
+\subsection subsecLBFileExampleDDL File Definition in DDL
+The simplified DDL file definition for creating an HDF5 file. For simplicity, a simplified DDL is used in this tutorial. A
+complete and more rigorous DDL can be found in the \ref DDLBNF110.
+
+The following symbol definitions are used in the DDL:
+\code
+ ::= defined as
+ <tname> a token with the name tname
+ <a> | <b> one of <a> or <b>
+ <a>* zero or more occurrences of <a>
+\endcode
+
+The simplified DDL for file definition is as follows:
+\code
+ <file> ::= HDF5 "<file_name>" { <root_group> }
+
+ <root_group> ::= GROUP "/" { <group_attribute>*
+ <group_member>* }
+
+ <group_attribute> ::= <attribute>
+
+ <group_member> ::= <group> | <dataset>
+\endcode
+
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBDsetCreate Creating a Dataset
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+A dataset is a multidimensional array of data elements, together with supporting metadata. To create
+a dataset, the application program must specify the location at which to create the dataset, the
+dataset name, the datatype and dataspace of the data array, and the property lists.
+
+\section secLBDsetCreateDtype Datatypes
+A datatype is a collection of properties, all of which can be stored on disk, and which, when taken as
+a whole, provide complete information for data conversion to or from that datatype.
+
+There are two categories of datatypes in HDF5:
+<ul>
+<li><strong>Pre-defined</strong>: These datatypes are opened and closed by HDF5.<br />
+Pre-defined datatypes can be atomic or composite:
+<ul><li>Atomic datatypes cannot be decomposed into smaller datatype units at the API level. For example: integer, float, reference, string.</li>
+<li>Composite datatypes are aggregations of one or more datatypes. For example: array, variable length, enumeration, compound.</li></ul>
+</li>
+<li><strong>Derived</strong>: These datatypes are created or derived from the pre-defined types.<br />
+A simple example of creating a derived datatype is using the string datatype, H5T_C_S1, to create strings of more than one character:<br />
+\code
+ hid_t strtype; // Datatype ID
+ herr_t status;
+
+ strtype = H5Tcopy (H5T_C_S1);
+ status = H5Tset_size (strtype, 5); // create string of length 5
+\endcode
+</li>
+</ul>
+
+Shown below are the HDF5 pre-defined datatypes.
+\code
+ +-- integer
+ +-- floating point
+ +---- atomic ----+-- date and time
+ | +-- character string
+ HDF5 datatypes --| +-- bitfield
+ | +-- opaque
+ |
+ +---- compound
+\endcode
+
+Some of the HDF5 predefined atomic datatypes are listed below.
+
+<table>
+<caption>Examples of HDF5 predefined datatypes</caption>
+<tr>
+<th><strong>Datatype</strong></th>
+<th><strong>Description</strong></th>
+</tr>
+<tr>
+<th><strong>H5T_STD_I32LE</strong></th>
+<td>Four-byte, little-endian, signed, two's complement integer</td>
+</tr>
+<tr>
+<th><strong>H5T_STD_U16BE</strong></th>
+<td>Two-byte, big-endian, unsigned integer</td>
+</tr>
+<tr>
+<th><strong>H5T_IEEE_F32BE</strong></th>
+<td>Four-byte, big-endian, IEEE floating point</td>
+</tr>
+<tr>
+<th><strong>H5T_IEEE_F64LE</strong></th>
+<td>Eight-byte, little-endian, IEEE floating point</td>
+</tr>
+<tr>
+<th><strong>H5T_C_S1</strong></th>
+<td>One-byte, null-terminated string of eight-bit characters</td>
+</tr>
+</table>
+
+<table>
+<caption>Examples of HDF5 predefined native datatypes</caption>
+<tr>
+<th><strong>Native Datatype</strong></th>
+<th><strong>Corresponding C or FORTRAN Type</strong></th>
+</tr>
+<tr>
+<th colspan="2"><strong>C</strong></th>
+</tr>
+<tr>
+<th><strong>H5T_NATIVE_INT</strong></th>
+<td>int</td>
+</tr>
+<tr>
+<th><strong>H5T_NATIVE_FLOAT</strong></th>
+<td>float</td>
+</tr>
+<tr>
+<th><strong>H5T_NATIVE_CHAR</strong></th>
+<td>char</td>
+</tr>
+<tr>
+<th><strong>H5T_NATIVE_DOUBLE</strong></th>
+<td>double</td>
+</tr>
+<tr>
+<th><strong>H5T_NATIVE_LDOUBLE</strong></th>
+<td>long double</td>
+</tr>
+<tr>
+<th colspan="2"><strong>Fortran</strong></th>
+</tr>
+<tr>
+<th><strong>H5T_NATIVE_INTEGER</strong></th>
+<td>integer</td>
+</tr>
+<tr>
+<th><strong>H5T_NATIVE_REAL</strong></th>
+<td>real</td>
+</tr>
+<tr>
+<th><strong>H5T_NATIVE_DOUBLE</strong></th>
+<td>double precision</td>
+</tr>
+<tr>
+<th><strong>H5T_NATIVE_CHARACTER</strong></th>
+<td>character</td>
+</tr>
+</table>
+
+In this tutorial, we consider only HDF5 predefined integers.
+
+For further information on datatypes, see \ref sec_datatype in the \ref UG, in addition to the \ref LBDatatypes tutorial topic.
+
+\section secLBDsetCreateDspace Datasets and Dataspaces
+A dataspace describes the dimensionality of the data array. A dataspace is either a regular N-dimensional
+array of data points, called a simple dataspace, or a more general collection of data points organized
+in another manner, called a complex dataspace. In this tutorial, we only consider simple dataspaces.
+
+<em>HDF5 dataspaces</em>
+\code
+ +-- simple
+ HDF5 dataspaces --|
+ +-- complex
+\endcode
+The dimensions of a dataset can be fixed (unchanging), or they may be unlimited, which means that they are
+extensible. A dataspace can also describe a portion of a dataset, making it possible to do partial
+I/O operations on selections.
+
+\section secLBDsetCreateProp Property Lists
+Property lists are a mechanism for modifying the default behavior when creating or accessing objects. For
+more information on property lists see the \ref LBPropsList tutorial topic.
+
+The following property lists can be specified when creating a dataset:
+\li Dataset Creation Property List<br />
+When creating a dataset, HDF5 allows the user to specify how raw data is organized and/or compressed on
+disk. This information is stored in a dataset creation property list and passed to the dataset interface.
+The raw data on disk can be stored contiguously (in the same linear way that it is organized in memory),
+partitioned into chunks, stored externally, etc. In this tutorial, we use the default dataset creation
+property list (contiguous storage layout and no compression). For more information about dataset creation
+property lists, see \ref sec_dataset in the \ref UG.
+\li Link Creation Property List<br />
+The link creation property list governs creation of the link(s) by which a new dataset is accessed and the
+creation of any intermediate groups that may be missing.
+\li Dataset Access Property List<br />
+Dataset access property lists are properties that can be specified when accessing a dataset.
+
+\section secLBDsetCreateSteps Steps to Create a Dataset
+To create an empty dataset (no data written) the following steps need to be taken:
+<ol>
+<li>Obtain the location identifier where the dataset is to be created.</li>
+<li>Define or specify the dataset characteristics:
+<ol>
+<li>Define a datatype or specify a pre-defined datatype.</li>
+<li>Define a dataspace.</li>
+<li>Specify the property list(s) or use the default.</li>
+</ol></li>
+<li>Create the dataset.</li>
+<li>Close the datatype, the dataspace, and the property list(s) if necessary.</li>
+<li>Close the dataset.</li>
+</ol>
+In HDF5, datatypes and dataspaces are independent objects which are created separately from any dataset
+that they might be attached to. Because of this, the creation of a dataset requires the definition of
+the datatype and dataspace. In this tutorial, we use the HDF5 predefined datatypes (integer) and consider
+only simple dataspaces. Hence, only the creation of dataspace objects is needed.
+
+\section secLBDsetCreateHL High Level APIs
+The High Level \ref H5LT include functions that simplify and condense the steps for
+creating datasets in HDF5. The examples in the following section use the standard APIs. For a
+quick start you may prefer to look at the \ref H5LT at this time.
+
+If you plan to work with images, please look at the High Level \ref H5IM, as well.
+
+\section secLBDsetCreateProg Programming Example
+
+\subsection subsecLBDsetCreateProgDesc Description
+See \ref LBExamples for the examples used in the \ref LearnBasics tutorial.
+
+The example shows how to create an empty dataset. It creates a file called <code style="background-color:whitesmoke;">dset.h5</code>
+in the C version (<code style="background-color:whitesmoke;">dsetf.h5</code> in Fortran), defines the dataset dataspace, creates a
+dataset which is a 4x6 integer array, and then closes the dataspace, the dataset, and the file.
+
+For details on compiling an HDF5 application: [ \ref LBCompiling ]
+
+\subsection subsecLBDsetCreateProgRem Remarks
+#H5Screate_simple creates a new simple dataspace and returns a dataspace identifier.
+#H5Sclose releases and terminates access to a dataspace.
+
+<em>C</em>
+\code
+ dataspace_id = H5Screate_simple (rank, dims, maxdims);
+ status = H5Sclose (dataspace_id );
+\endcode
+
+<em>FORTRAN</em>
+\code
+ CALL h5screate_simple_f (rank, dims, dataspace_id, hdferr, maxdims=max_dims)
+ or
+ CALL h5screate_simple_f (rank, dims, dataspace_id, hdferr)
+
+ CALL h5sclose_f (dataspace_id, hdferr)
+\endcode
+
+#H5Dcreate creates an empty dataset at the specified location and returns a dataset identifier.
+#H5Dclose closes the dataset and releases the resource used by the dataset. This call is mandatory.
+
+<em>C</em>
+\code
+ dataset_id = H5Dcreate(file_id, "/dset", H5T_STD_I32BE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ status = H5Dclose (dataset_id);
+\endcode
+
+<em>FORTRAN</em>
+\code
+ CALL h5dcreate_f (loc_id, name, type_id, dataspace_id, dset_id, hdferr)
+ CALL h5dclose_f (dset_id, hdferr)
+\endcode
+
+Note that if using the pre-defined datatypes in FORTRAN, then a call must be made to initialize and terminate access to the pre-defined datatypes:
+\code
+ CALL h5open_f (hdferr)
+ CALL h5close_f (hdferr)
+\endcode
+
+H5open must be called before any HDF5 library subroutine calls are made;
+H5close must be called after the final HDF5 library subroutine call.
+
+See the programming example for an illustration of the use of these calls.
+
+\subsection subsecLBDsetCreateContent File Contents
+The contents of the file dset.h5 (dsetf.h5 for FORTRAN) are shown below:
+<table>
+<caption>Contents of dset.h5 ( dsetf.h5)</caption>
+<tr>
+<td>
+\image html imgLBDsetCreate.gif
+</td>
+</tr>
+</table>
+<table>
+<tr>
+<th>dset.h5 in DDL</th>
+<th>dsetf.h5 in DDL</th>
+<tr>
+<td>
+\code
+HDF5 "dset.h5" {
+GROUP "/" {
+ DATASET "dset" {
+ DATATYPE { H5T_STD_I32BE }
+ DATASPACE { SIMPLE ( 4, 6 ) / ( 4, 6 ) }
+ DATA {
+ 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0
+ }
+ }
+}
+}
+\endcode
+</td>
+<td>
+\code
+HDF5 "dsetf.h5" {
+GROUP "/" {
+ DATASET "dset" {
+ DATATYPE { H5T_STD_I32BE }
+ DATASPACE { SIMPLE ( 6, 4 ) / ( 6, 4 ) }
+ DATA {
+ 0, 0, 0, 0,
+ 0, 0, 0, 0,
+ 0, 0, 0, 0,
+ 0, 0, 0, 0,
+ 0, 0, 0, 0,
+ 0, 0, 0, 0
+ }
+ }
+}
+}
+\endcode
+</td>
+</tr>
+</table>
+Note in above that #H5T_STD_I32BE, a 32-bit Big Endian integer, is an HDF5 atomic datatype.
+
+\subsection subsecLBDsetCreateProgDDL Dataset Definition in DDL
+The following is the simplified DDL dataset definition:
+\code
+ <dataset> ::= DATASET "<dataset_name>" { <datatype>
+ <dataspace>
+ <data>
+ <dataset_attribute>* }
+
+ <datatype> ::= DATATYPE { <atomic_type> }
+
+ <dataspace> ::= DATASPACE { SIMPLE <current_dims> / <max_dims> }
+
+ <dataset_attribute> ::= <attribute>
+\endcode
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBDsetRW Reading From and Writing To a Dataset
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\section secLBDsetRW Dataset I/O Operation
+During a dataset I/O operation, the library transfers raw data between memory and the file. The data in memory
+can have a datatype different from that of the file and can also be of a different size (i.e., the data in
+memory is a subset of the dataset elements, or vice versa). Therefore, to perform read or write operations,
+the application program must specify:
+\li The dataset
+\li The dataset's datatype in memory
+\li The dataset's dataspace in memory
+\li The dataset's dataspace in the file
+\li The dataset transfer property list<br />
+<ul>
+<li>(The dataset transfer property list controls various aspects of the I/O operations, such as the number
+of processes participating in a collective I/O request or hints to the library to control caching of raw
+data. In this tutorial, we use the default dataset transfer property list.)</li>
+</ul>
+\li The data buffer
+
+The steps to read from or write to a dataset are as follows:
+<ol>
+<li>Obtain the dataset identifier.</li>
+<li>Specify the memory datatype.</li>
+<li>Specify the memory dataspace.</li>
+<li>Specify the file dataspace.</li>
+<li>Specify the transfer properties.</li>
+<li>Perform the desired operation on the dataset.</li>
+<li>Close the dataset.</li>
+<li>Close the dataspace, datatype, and property list if necessary.</li>
+</ol>
+
+To read from or write to a dataset, the #H5Dread and #H5Dwrite routines are used.
+
+<em>C</em>
+\code
+ status = H5Dread (set_id, mem_type_id, mem_space_id, file_space_id, xfer_prp, buf );
+ status = H5Dwrite (set_id, mem_type_id, mem_space_id, file_space_id, xfer_prp, buf);
+\endcode
+
+<em>Fortran</em>
+\code
+ CALL h5dread_f(dset_id, mem_type_id, buf, dims, error, &
+ mem_space_id=mspace_id, file_space_id=fspace_id, &
+ xfer_prp=xfer_plist_id)
+ or
+ CALL h5dread_f(dset_id, mem_type_id, buf, dims, error)
+
+
+ CALL h5dwrite_f(dset_id, mem_type_id, buf, dims, error, &
+ mem_space_id=mspace_id, file_space_id=fspace_id, &
+ xfer_prp=xfer_plist_id)
+ or
+ CALL h5dwrite_f(dset_id, mem_type_id, buf, dims, error)
+\endcode
+
+\section secLBDsetRWHL High Level APIs
+The High Level \ref H5LT include functions that simplify and condense the steps for creating and
+reading datasets. Please be sure to review them, in addition to this tutorial.
+
+\section secLBDsetRWEx Programming Example
+
+\subsection secLBDsetRWExDesc Description
+See \ref LBExamples for the examples used in the \ref LearnBasics tutorial.
+
+The example shows how to read and write an existing dataset. It opens the file created in the previous example,
+obtains the dataset identifier for the dataset <code style="background-color:whitesmoke;">/dset</code>, writes the dataset to the file, then reads
+the dataset back. It then closes the dataset and file.
+
+Note that #H5S_ALL is passed in for both the memory and file dataspace parameters in the read and write calls.
+This indicates that the entire dataspace of the dataset will be read or written to. #H5S_ALL by itself does not
+necessarily have this meaning. See the \ref RM entry for #H5Dread or #H5Dwrite for more information on using #H5S_ALL.
+
+For details on compiling an HDF5 application:
+[ \ref LBCompiling ]
+
+\subsection secLBDsetRWExRem Remarks
+#H5Fopen opens an existing file and returns a file identifier.
+
+#H5Dopen opens an existing dataset with the specified name and location.
+
+#H5Dwrite writes raw data from an application buffer to the specified dataset, converting from the datatype and
+dataspace of the dataset in memory to the datatype and dataspace of the dataset in the file. Specifying #H5S_ALL
+for both the memory and file dataspaces indicates that the entire dataspace of the dataset is to be written to.
+#H5S_ALL by itself does not necessarily have this meaning. See the \ref RM entry for #H5Dwrite for more information
+on using #H5S_ALL.
+
+#H5Dread reads raw data from the specified dataset to an application buffer, converting from the file datatype and
+dataspace to the memory datatype and dataspace. Specifying #H5S_ALL for both the memory and file dataspaces
+indicates that the entire dataspace of the dataset is to be read. #H5S_ALL by itself does not necessarily have
+this meaning. See the \ref RM entry for #H5Dread for more information on using #H5S_ALL.
+
+\subsection secLBDsetRWExCont File Contents
+
+Shown below is the contents of dset.h5 (created by the C program).
+
+<em>dset.h5 in DDL</em>
+\code
+ HDF5 "dset.h5" {
+ GROUP "/" {
+ DATASET "dset" {
+ DATATYPE { H5T_STD_I32BE }
+ DATASPACE { SIMPLE ( 4, 6 ) / ( 4, 6 ) }
+ DATA {
+ 1, 2, 3, 4, 5, 6,
+ 7, 8, 9, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18,
+ 19, 20, 21, 22, 23, 24
+ }
+ }
+ }
+ }
+\endcode
+
+Shown below is the contents of dsetf.h5 (created by the FORTRAN program).
+
+<em>dsetf.h5 in DDL</em>
+\code
+ HDF5 "dsetf.h5" {
+ GROUP "/" {
+ DATASET "dset" {
+ DATATYPE { H5T_STD_I32BE }
+ DATASPACE { SIMPLE ( 6, 4 ) / ( 6, 4 ) }
+ DATA {
+ 1, 7, 13, 19,
+ 2, 8, 14, 20,
+ 3, 9, 15, 21,
+ 4, 10, 16, 22,
+ 5, 11, 17, 23,
+ 6, 12, 18, 24
+ }
+ }
+ }
+ }
+\endcode
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBAttrCreate Creating an Attribute
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+Attributes are small datasets that can be used to describe the nature and/or the intended usage of
+the object they are attached to. In this section, we show how to create, read, and write an attribute.
+
+\section secLBAttrCreate Creating an attribute
+Creating an attribute is similar to creating a dataset. To create an attribute, the application must
+specify the object which the attribute is attached to, the datatype and dataspace of the attribute
+data, and the attribute creation property list.
+
+The steps to create an attribute are as follows:
+<ol>
+<li>Obtain the object identifier that the attribute is to be attached to.</li>
+<li>Define the characteristics of the attribute and specify the attribute creation property list.
+<ul>
+<li>Define the datatype.</li>
+<li>Define the dataspace.</li>
+<li>Specify the attribute creation property list.</li>
+</ul></li>
+<li>Create the attribute.</li>
+<li>Close the attribute and datatype, dataspace, and attribute creation property list, if necessary.</li>
+</ol>
+
+To create and close an attribute, the calling program must use #H5Acreate and #H5Aclose. For example:
+
+<em>C</em>
+\code
+ attr_id = H5Acreate (dataset_id, "Units", H5T_STD_I32BE, dataspace_id, H5P_DEFAULT, H5P_DEFAULT)
+ status = H5Aclose (attr_id);
+\endcode
+
+<em>Fortran</em>
+\code
+ CALL h5acreate_f (dset_id, attr_nam, type_id, space_id, attr_id, &
+ hdferr, creation_prp=creat_plist_id)
+ or
+ CALL h5acreate_f (dset_id, attr_nam, type_id, space_id, attr_id, hdferr)
+
+ CALL h5aclose_f (attr_id, hdferr)
+\endcode
+
+\section secLBAttrCreateRW Reading/Writing an attribute
+Attributes may only be read or written as an entire object; no partial I/O is supported. Therefore,
+to perform I/O operations on an attribute, the application needs only to specify the attribute and
+the attribute's memory datatype.
+
+The steps to read or write an attribute are as follows.
+<ol>
+<li>Obtain the attribute identifier.</li>
+<li>Specify the attribute's memory datatype.</li>
+<li>Perform the desired operation.</li>
+<li>Close the memory datatype if necessary.</li>
+</ol>
+
+To read and/or write an attribute, the calling program must contain the #H5Aread and/or
+#H5Awrite routines. For example:
+
+<em>C</em>
+\code
+ status = H5Aread (attr_id, mem_type_id, buf);
+ status = H5Awrite (attr_id, mem_type_id, buf);
+\endcode
+
+<em>Fortran</em>
+\code
+ CALL h5awrite_f (attr_id, mem_type_id, buf, dims, hdferr)
+ CALL h5aread_f (attr_id, mem_type_id, buf, dims, hdferr)
+\endcode
+
+\section secLBAttrCreateHL High Level APIs
+The High Level \ref H5LT includes functions that simplify and condense the steps for creating and
+reading datasets. Please be sure to review them, in addition to this tutorial.
+
+\section secLBAttrCreateRWEx Programming Example
+
+\subsection secLBAttrCreateRWExDesc Description
+See \ref LBExamples for the examples used in the \ref LearnBasics tutorial.
+
+The example shows how to create and write a dataset attribute. It opens an existing file <code style="background-color:whitesmoke;">dset.h5</code>
+in C (<code style="background-color:whitesmoke;">dsetf.h5</code> in FORTRAN), obtains the identifier of the dataset <code style="background-color:whitesmoke;">/dset</code>, defines
+the attribute's dataspace, creates the dataset attribute, writes the attribute, and then closes the attribute's
+dataspace, attribute, dataset, and file.
+
+For details on compiling an HDF5 application:
+[ \ref LBCompiling ]
+
+\subsection secLBAttrCreateRWExRem Remarks
+#H5Acreate creates an attribute which is attached to the object specified by the first parameter, and returns an identifier.
+
+#H5Awrite writes the entire attribute, and returns the status of the write.
+
+When an attribute is no longer accessed by a program, #H5Aclose must be called to release the attribute from use.
+An #H5Aclose/h5aclose_f call is mandatory.
+
+\subsection secLBAttrCreateRWExCont File Contents
+
+Shown below is the contents and the attribute definition of <code style="background-color:whitesmoke;">dset.h5</code> (created by the C program).
+
+<em>dset.h5 in DDL</em>
+\code
+HDF5 "dset.h5" {
+GROUP "/" {
+DATASET "dset" {
+DATATYPE { H5T_STD_I32BE }
+DATASPACE { SIMPLE ( 4, 6 ) / ( 4, 6 ) }
+DATA {
+ 1, 2, 3, 4, 5, 6,
+ 7, 8, 9, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18,
+ 19, 20, 21, 22, 23, 24
+}
+ATTRIBUTE "attr" {
+ DATATYPE { H5T_STD_I32BE }
+ DATASPACE { SIMPLE ( 2 ) / ( 2 ) }
+ DATA {
+ 100, 200
+ }
+}
+}
+}
+}
+\endcode
+
+Shown below is the contents and the attribute definition of <code style="background-color:whitesmoke;">dsetf.h5</code> (created by the FORTRAN program).
+
+<em>dsetf.h5 in DDL</em>
+\code
+HDF5 "dsetf.h5" {
+GROUP "/" {
+DATASET "dset" {
+DATATYPE { H5T_STD_I32BE }
+DATASPACE { SIMPLE ( 6, 4 ) / ( 6, 4 ) }
+DATA {
+ 1, 7, 13, 19,
+ 2, 8, 14, 20,
+ 3, 9, 15, 21,
+ 4, 10, 16, 22,
+ 5, 11, 17, 23,
+ 6, 12, 18, 24
+}
+ATTRIBUTE "attr" {
+ DATATYPE { H5T_STD_I32BE }
+ DATASPACE { SIMPLE ( 2 ) / ( 2 ) }
+ DATA {
+ 100, 200
+ }
+}
+}
+}
+}
+\endcode
+
+\subsection secLBAttrCreateRWExDDL Attribute Definition in DDL
+
+<em>HDF5 Attribute Definition</em>
+\code
+<attribute> ::= ATTRIBUTE "<attr_name>" { <datatype>
+ <dataspace>
+ <data> }
+\endcode
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+*/
diff --git a/doxygen/dox/LearnBasics2.dox b/doxygen/dox/LearnBasics2.dox
new file mode 100644
index 0000000..ffcb971
--- /dev/null
+++ b/doxygen/dox/LearnBasics2.dox
@@ -0,0 +1,1159 @@
+/** @page LBGrpCreate Creating a Group
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\section secLBGrpCreate Creating a group
+An HDF5 group is a structure containing zero or more HDF5 objects. The two primary HDF5 objects are groups and datasets. To create a group, the calling program must:
+<ol>
+<li>Obtain the location identifier where the group is to be created.</li>
+<li>Create the group.</li>
+<li>Close the group.</li>
+</ol>
+
+To create a group, the calling program must call #H5Gcreate.
+To close the group, #H5Gclose must be called. The close call is mandatory.
+
+For example:
+
+<em>C</em>
+\code
+ group_id = H5Gcreate(file_id, "/MyGroup", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ status = H5Gclose (group_id);
+\endcode
+
+<em>Fortran</em>
+\code
+ CALL h5gcreate_f (loc_id, name, group_id, error)
+ CALL h5gclose_f (group_id, error)
+\endcode
+
+\section secLBGrpCreateRWEx Programming Example
+
+\subsection secLBGrpCreateRWExDesc Description
+See \ref LBExamples for the examples used in the \ref LearnBasics tutorial.
+
+The example shows how to create and close a group. It creates a file called <code style="background-color:whitesmoke;">group.h5</code> in C
+(<code style="background-color:whitesmoke;">groupf.h5</code> for FORTRAN), creates a group called MyGroup in the root group, and then closes the group and file.
+
+For details on compiling an HDF5 application:
+[ \ref LBCompiling ]
+
+\subsection secLBGrpCreateRWExCont File Contents
+
+Shown below is the contents and the definition of the group of <code style="background-color:whitesmoke;">group.h5</code> (created by the C program).
+(The FORTRAN program creates the HDF5 file <code style="background-color:whitesmoke;">groupf.h5</code> and the resulting DDL shows the filename
+<code style="background-color:whitesmoke;">groupf.h5</code> in the first line.)
+<table>
+<caption>The Contents of group.h5.</caption>
+<tr>
+<td>
+\image html imggrpcreate.gif
+</td>
+</tr>
+</table>
+
+<em>group.h5 in DDL</em>
+\code
+HDF5 "group.h5" {
+GROUP "/" {
+ GROUP "MyGroup" {
+ }
+}
+}
+\endcode
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBGrpCreateNames Creating Groups using Absolute and Relative Names
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+Recall that to create an HDF5 object, we have to specify the location where the object is to be created.
+This location is determined by the identifier of an HDF5 object and the name of the object to be created.
+The name of the created object can be either an absolute name or a name relative to the specified identifier.
+In the previous example, we used the file identifier and the absolute name <code style="background-color:whitesmoke;">/MyGroup</code> to create a group.
+
+In this section, we discuss HDF5 names and show how to use absolute and relative names.
+
+\section secLBGrpCreateNames Names
+HDF5 object names are a slash-separated list of components. There are few restrictions on names: component
+names may be any length except zero and may contain any character except slash (<code style="background-color:whitesmoke;">/</code>) and the null terminator.
+A full name may be composed of any number of component names separated by slashes, with any of the component
+names being the special name <code style="background-color:whitesmoke;">.</code> (a dot or period). A name which begins with a slash is an <em>absolute name</em> which
+is accessed beginning with the root group of the file; all other names are <em>relative names</em> and the named
+object is accessed beginning with the specified group. A special case is the name <code style="background-color:whitesmoke;">/</code> (or equivalent) which
+refers to the root group.
+
+Functions which operate on names generally take a location identifier, which can be either a file identifier
+or a group identifier, and perform the lookup with respect to that location. Several possibilities are
+described in the following table:
+
+<table>
+<tr>
+<th><strong>Location Type</strong></th>
+<th><strong>Object Name</strong></th>
+<th><strong>Description</strong></th>
+</tr>
+<tr>
+<th><strong>File identifier</strong></th>
+<td>/foo/bar</td>
+<td>The object bar in group foo in the root group.</td>
+</tr>
+<tr>
+<th><strong>Group identifier</strong></th>
+<td>/foo/bar</td>
+<td>The object bar in group foo in the root group of the file containing the specified group.
+In other words, the group identifier's only purpose is to specify a file.</td>
+</tr>
+<tr>
+<th><strong>File identifier</strong></th>
+<td>/</td>
+<td>The root group of the specified file.</td>
+</tr>
+<tr>
+<th><strong>Group identifier</strong></th>
+<td>/</td>
+<td>The root group of the file containing the specified group.</td>
+</tr>
+<tr>
+<th><strong>Group identifier</strong></th>
+<td>foo/bar</td>
+<td>The object bar in group foo in the specified group.</td>
+</tr>
+<tr>
+<th><strong>File identifier</strong></th>
+<td>.</td>
+<td>The root group of the file.</td>
+</tr>
+<tr>
+<th><strong>Group identifier</strong></th>
+<td>.</td>
+<td>The specified group.</td>
+</tr>
+<tr>
+<th><strong>Other identifier</strong></th>
+<td>.</td>
+<td>The specified object.</td>
+</tr>
+</table>
+
+\section secLBGrpCreateNamesEx Programming Example
+
+\subsection secLBGrpCreateNamesExDesc Description
+See \ref LBExamples for the examples used in the \ref LearnBasics tutorial.
+
+The example code shows how to create groups using absolute and relative names. It creates three groups: the first two groups are created using
+the file identifier and the group absolute names while the third group is created using a group identifier and a name relative to the specified group.
+
+For details on compiling an HDF5 application:
+[ \ref LBCompiling ]
+
+\subsection secLBGrpCreateNamesExRem Remarks
+#H5Gcreate creates a group at the location specified by a location identifier and a name. The location identifier
+can be a file identifier or a group identifier and the name can be relative or absolute.
+
+The first #H5Gcreate/h5gcreate_f creates the group <code style="background-color:whitesmoke;">MyGroup</code> in the root group of the specified file.
+
+The second #H5Gcreate/h5gcreate_f creates the group <code style="background-color:whitesmoke;">Group_A</code> in the group <code style="background-color:whitesmoke;">MyGroup</code> in the root group of the specified
+file. Note that the parent group (<code style="background-color:whitesmoke;">MyGroup</code>) already exists.
+
+The third #H5Gcreate/h5gcreate_f creates the group <code style="background-color:whitesmoke;">Group_B</code> in the specified group.
+
+\subsection secLBGrpCreateNamesExCont File Contents
+
+Shown below is the contents and the definition of the group of <code style="background-color:whitesmoke;">groups.h5</code> (created by the C program).
+(The FORTRAN program creates the HDF5 file <code style="background-color:whitesmoke;">groupsf.h5</code> and the resulting DDL shows the filename
+<code style="background-color:whitesmoke;">groupsf.h5</code> in the first line.)
+<table>
+<caption>The Contents of groups.h5.</caption>
+<tr>
+<td>
+\image html imggrps.gif
+</td>
+</tr>
+</table>
+
+<em>groups.h5 in DDL</em>
+\code
+HDF5 "groups.h5" {
+GROUP "/" {
+ GROUP "MyGroup" {
+ GROUP "Group_A" {
+ }
+ GROUP "Group_B" {
+ }
+ }
+}
+}
+\endcode
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBGrpDset Creating Datasets in Groups
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\section secLBGrpDset Datasets in Groups
+We have shown how to create groups, datasets, and attributes. In this section, we show how to create
+datasets in groups. Recall that #H5Dcreate creates a dataset at the location specified by a location
+identifier and a name. Similar to #H5Gcreate, the location identifier can be a file identifier or a
+group identifier and the name can be relative or absolute. The location identifier and the name
+together determine the location where the dataset is to be created. If the location identifier and
+name refer to a group, then the dataset is created in that group.
+
+\section secLBGrpDsetEx Programming Example
+
+\subsection secLBGrpDsetExDesc Description
+See \ref LBExamples for the examples used in the \ref LearnBasics tutorial.
+
+The example shows how to create a dataset in a particular group. It opens the file created in the previous example
+and creates two datasets: <code style="background-color:whitesmoke;">dset1</code> in the group
+<code style="background-color:whitesmoke;">MyGroup</code> and <code style="background-color:whitesmoke;">dset2</code>
+in the group <code style="background-color:whitesmoke;">Group_A</code>.
+
+For details on compiling an HDF5 application:
+[ \ref LBCompiling ]
+
+\subsection secLBGrpDsetExCont File Contents
+
+Shown below is the contents and the definition of the group of <code style="background-color:whitesmoke;">groups.h5</code> (created by the C program).
+(The FORTRAN program creates the HDF5 file <code style="background-color:whitesmoke;">groupsf.h5</code> and the resulting DDL shows the filename
+<code style="background-color:whitesmoke;">groupsf.h5</code> in the first line.)
+<table>
+<caption>The contents of the file groups.h5 (groupsf.h5 for FORTRAN)</caption>
+<tr>
+<td>
+\image html imggrpdsets.gif
+</td>
+</tr>
+</table>
+
+<em>groups.h5 in DDL</em>
+\code
+HDF5 "groups.h5" {
+GROUP "/" {
+GROUP "MyGroup" {
+GROUP "Group_A" {
+ DATASET "dset2" {
+ DATATYPE { H5T_STD_I32BE }
+ DATASPACE { SIMPLE ( 2, 10 ) / ( 2, 10 ) }
+ DATA {
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
+ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10
+ }
+ }
+}
+GROUP "Group_B" {
+}
+DATASET "dset1" {
+ DATATYPE { H5T_STD_I32BE }
+ DATASPACE { SIMPLE ( 3, 3 ) / ( 3, 3 ) }
+ DATA {
+ 1, 2, 3,
+ 1, 2, 3,
+ 1, 2, 3
+ }
+}
+}
+}
+}
+\endcode
+
+<em>groupsf.h5 in DDL</em>
+\code
+HDF5 "groupsf.h5" {
+GROUP "/" {
+GROUP "MyGroup" {
+GROUP "Group_A" {
+ DATASET "dset2" {
+ DATATYPE { H5T_STD_I32BE }
+ DATASPACE { SIMPLE ( 10, 2 ) / ( 10, 2 ) }
+ DATA {
+ 1, 1,
+ 2, 2,
+ 3, 3,
+ 4, 4,
+ 5, 5,
+ 6, 6,
+ 7, 7,
+ 8, 8,
+ 9, 9,
+ 10, 10
+ }
+ }
+}
+GROUP "Group_B" {
+}
+DATASET "dset1" {
+ DATATYPE { H5T_STD_I32BE }
+ DATASPACE { SIMPLE ( 3, 3 ) / ( 3, 3 ) }
+ DATA {
+ 1, 1, 1,
+ 2, 2, 2,
+ 3, 3, 3
+ }
+}
+}
+}
+}
+\endcode
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBDsetSubRW Reading From or Writing To a Subset of a Dataset
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\section secLBDsetSubRW Dataset Subsets
+There are two ways that you can select a subset in an HDF5 dataset and read or write to it:
+<ul><li>
+<strong>Hyperslab Selection</strong>: The #H5Sselect_hyperslab call selects a logically contiguous
+collection of points in a dataspace, or a regular pattern of points or blocks in a dataspace.
+</li><li>
+<strong>Element Selection</strong>: The #H5Sselect_elements call selects elements in an array.
+</li></ul>
+
+HDF5 allows you to read from or write to a portion or subset of a dataset by:
+\li Selecting a Subset of the Dataset's Dataspace,
+\li Selecting a Memory Dataspace,
+\li Reading From or Writing to a Dataset Subset.
+
+\section secLBDsetSubRWSel Selecting a Subset of the Dataset's Dataspace
+First you must obtain the dataspace of a dataset in a file by calling #H5Dget_space.
+
+Then select a subset of that dataspace by calling #H5Sselect_hyperslab. The <em>offset</em>, <em>count</em>, <em>stride</em>
+and <em>block</em> parameters of this API define the shape and size of the selection. They must be arrays
+with the same number of dimensions as the rank of the dataset's dataspace. These arrays <strong>ALL</strong> work
+together to define a selection. A change to one of these arrays can affect the others.
+\li \em offset: An array that specifies the offset of the starting element of the specified hyperslab.
+\li \em count: An array that determines how many blocks to select from the dataspace in each dimension. If the block
+size for a dimension is one then the count is the number of elements along that dimension.
+\li \em stride: An array that allows you to sample elements along a dimension. For example, a stride of one (or NULL)
+will select every element along a dimension, a stride of two will select every other element, and a stride of three
+will select an element after every two elements.
+\li \em block: An array that determines the size of the element block selected from a dataspace. If the block size
+is one or NULL then the block size is a single element in that dimension.
+
+\section secLBDsetSubRWMem Selecting a Memory Dataspace
+You must select a memory dataspace in addition to a file dataspace before you can read a subset from or write a subset
+to a dataset. A memory dataspace can be specified by calling #H5Screate_simple.
+
+The memory dataspace passed to the read or write call must contain the same number of elements as the file dataspace.
+The number of elements in a dataspace selection can be determined with the #H5Sget_select_npoints API.
+
+\section secLBDsetSubRWSub Reading From or Writing To a Dataset Subset
+To read from or write to a dataset subset, the #H5Dread and #H5Dwrite routines are used. The memory and file dataspace
+identifiers from the selections that were made are passed into the read or write call. For example (C):
+\code
+ status = H5Dwrite (.., .., memspace_id, dataspace_id, .., ..);
+\endcode
+
+\section secLBDsetSubRWProg Programming Example
+
+\subsection subsecLBDsetSubRWProgDesc Description
+See \ref LBExamples for the examples used in the \ref LearnBasics tutorial.
+
+The example creates an 8 x 10 integer dataset in an HDF5 file. It then selects and writes to a 3 x 4 subset
+of the dataset created with the dimensions offset by 1 x 2. (If using Fortran, the dimensions will be swapped.
+The dataset will be 10 x 8, the subset will be 4 x 3, and the offset will be 2 x 1.)
+
+PLEASE NOTE that the examples and images below were created using C.
+
+The following image shows the dataset that gets written originally, and the subset of data that gets modified
+afterwards. Dimension 0 is vertical and Dimension 1 is horizontal as shown below:
+<table>
+<tr>
+<td>
+\image html LBDsetSubRWProg.png
+</td>
+</tr>
+</table>
+
+The subset on the right above is created using these values for offset, count, stride, and block:
+\code
+offset = {1, 2}
+
+count = {3, 4}
+
+stride = {1, 1}
+
+block = {1, 1}
+\endcode
+
+\subsection subsecLBDsetSubRWProgExper Experiments with Different Selections
+Following are examples of changes that can be made to the example code provided to better understand
+how to make selections.
+
+\subsubsection subsubsecLBDsetSubRWProgExperOne Example 1
+By default the example code will select and write to a 3 x 4 subset. You can modify the count
+parameter in the example code to select a different subset, by changing the value of
+DIM0_SUB (C, C++) / dim0_sub (Fortran) near the top. Change its value to 7 to create a 7 x 4 subset:
+<table>
+<tr>
+<td>
+\image html imgLBDsetSubRW11.png
+</td>
+</tr>
+</table>
+
+If you were to change the subset to 8 x 4, the selection would be beyond the extent of the dimension:
+<table>
+<tr>
+<td>
+\image html imgLBDsetSubRW12.png
+</td>
+</tr>
+</table>
+
+The write will fail with the error: "<strong>file selection+offset not within extent</strong>"
+
+\subsubsection subsubsecLBDsetSubRWProgExperTwo Example 2
+In the example code provided, the memory and file dataspaces passed to the H5Dwrite call have the
+same size, 3 x 4 (DIM0_SUB x DIM1_SUB). Change the size of the memory dataspace to be 4 x 4 so that
+they do not match, and then compile:
+\code
+ dimsm[0] = DIM0_SUB + 1;
+ dimsm[1] = DIM1_SUB;
+ memspace_id = H5Screate_simple (RANK, dimsm, NULL);
+\endcode
+The code will fail with the error: "<strong>src and dest data spaces have different sizes</strong>"
+
+How many elements are in the memory and file dataspaces that were specified above? Add these lines:
+\code
+ hssize_t size;
+
+ /* Just before H5Dwrite call the following */
+ size = H5Sget_select_npoints (memspace_id);
+ printf ("\nmemspace_id size: %i\n", size);
+ size = H5Sget_select_npoints (dataspace_id);
+ printf ("dataspace_id size: %i\n", size);
+\endcode
+
+You should see these lines followed by the error:
+\code
+ memspace_id size: 16
+ dataspace_id size: 12
+\endcode
+
+\subsubsection subsubsecLBDsetSubRWProgExperThree Example 3
+This example shows the selection that occurs if changing the values of the <em>offset</em>, <em>count</em>,
+<em>stride</em> and <em>block</em> parameters in the example code.
+
+This will select two blocks. The <em>count</em> array specifies the number of blocks. The <em>block</em> array
+specifies the size of a block. The <em>stride</em> must be modified to accommodate the block <em>size</em>.
+<table>
+<tr>
+<td>
+\image html imgLBDsetSubRW31.png
+</td>
+</tr>
+</table>
+
+Now try modifying the count as shown below. The write will fail because the selection goes beyond the extent of the dimension:
+<table>
+<tr>
+<td>
+\image html imgLBDsetSubRW32.png
+</td>
+</tr>
+</table>
+
+If the offset were 1x1 (instead of 1x2), then the selection could be made:
+<table>
+<tr>
+<td>
+\image html imgLBDsetSubRW33.png
+</td>
+</tr>
+</table>
+
+The selections above were tested with the
+<a href="https://support.hdfgroup.org/ftp/HDF5/examples/howto/subset/h5_subsetbk.c">h5_subsetbk.c</a>
+example code. The memory dataspace was defined as one-dimensional.
+
+\subsection subsecLBDsetSubRWProgRem Remarks
+\li In addition to #H5Sselect_hyperslab, this example introduces the #H5Dget_space call to obtain the dataspace of a dataset.
+\li If using the default values for the stride and block parameters of #H5Sselect_hyperslab, then, for C you can specify NULL
+for these parameters, rather than passing in an array for each, and for Fortran 90 you can omit these parameters.
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBDatatypes Datatype Basics
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\section secLBDtype What is a Datatype?
+A datatype is a collection of datatype properties which provide complete information for data conversion to or from that datatype.
+
+Datatypes in HDF5 can be grouped as follows:
+\li <strong>Pre-Defined Datatypes</strong>: These are datatypes that are created by HDF5. They are actually opened
+(and closed) by HDF5, and can have a different value from one HDF5 session to the next.
+\li <strong>Derived Datatypes</strong>: These are datatypes that are created or derived from the pre-defined datatypes.
+Although created from pre-defined types, they represent a category unto themselves. An example of a commonly used derived
+datatype is a string of more than one character.
+
+\section secLBDtypePre Pre-defined Datatypes
+The properties of pre-defined datatypes are:
+\li Pre-defined datatypes are opened and closed by HDF5.
+\li A pre-defined datatype is a handle and is NOT PERSISTENT. Its value can be different from one HDF5 session to the next.
+\li Pre-defined datatypes are Read-Only.
+\li As mentioned, other datatypes can be derived from pre-defined datatypes.
+
+There are two types of pre-defined datatypes, standard (file) and native.
+
+<h4>Standard</h4>
+A standard (or file) datatype can be:
+<ul>
+<li><strong>Atomic</strong>: A datatype which cannot be decomposed into smaller datatype units at the API level.
+The atomic datatypes are:
+<ul>
+<li>integer</li>
+<li>float</li>
+<li>string (1-character)</li>
+<li>date and time</li>
+<li>bitfield</li>
+<li>reference</li>
+<li>opaque</li>
+</ul>
+</li>
+<li><strong>Composite</strong>: An aggregation of one or more datatypes.
+Composite datatypes include:
+<ul>
+<li>array</li>
+<li>variable length</li>
+<li>enumeration</li>
+<li>compound datatypes</li>
+</ul>
+Array, variable length, and enumeration datatypes are defined in terms of a single atomic datatype,
+whereas a compound datatype is a datatype composed of a sequence of datatypes.
+</li>
+</ul>
+
+<table>
+<tr>
+<th><strong>Notes</strong></th>
+</tr>
+<tr>
+<td>
+\li Standard pre-defined datatypes are the SAME on all platforms.
+\li They are the datatypes that you see in an HDF5 file.
+\li They are typically used when creating a dataset.
+</td>
+</tr>
+</table>
+
+<h4>Native</h4>
+Native pre-defined datatypes are used for memory operations, such as reading and writing. They are
+NOT THE SAME on different platforms. They are similar to C type names, and are aliased to the
+appropriate HDF5 standard pre-defined datatype for a given platform.
+
+For example, when on an Intel based PC, #H5T_NATIVE_INT is aliased to the standard pre-defined type,
+#H5T_STD_I32LE. On a MIPS machine, it is aliased to #H5T_STD_I32BE.
+<table>
+<tr>
+<th><strong>Notes</strong></th>
+</tr>
+<tr>
+<td>
+\li Native datatypes are NOT THE SAME on all platforms.
+\li Native datatypes simplify memory operations (read/write). The HDF5 library automatically converts as needed.
+\li Native datatypes are NOT in an HDF5 File. The standard pre-defined datatype that a native datatype corresponds
+to is what you will see in the file.
+</td>
+</tr>
+</table>
+
+<h4>Pre-Defined</h4>
+The following table shows the native types and the standard pre-defined datatypes they correspond
+to. (Keep in mind that HDF5 can convert between datatypes, so you can specify a buffer of a larger
+type for a dataset of a given type. For example, you can read a dataset that has a short datatype
+into a long integer buffer.)
+
+<table>
+<caption>Some HDF5 pre-defined native datatypes and corresponding standard (file) type</caption>
+<tr>
+<th><strong>C Type</strong></th>
+<th><strong>HDF5 Memory Type</strong></th>
+<th><strong>HDF5 File Type*</strong></th>
+</tr>
+<tr>
+<th colspan="3"><strong>Integer</strong></th>
+</tr>
+<tr>
+<td>int</td>
+<td>#H5T_NATIVE_INT</td>
+<td>#H5T_STD_I32BE or #H5T_STD_I32LE</td>
+</tr>
+<tr>
+<td>short</td>
+<td>#H5T_NATIVE_SHORT</td>
+<td>#H5T_STD_I16BE or #H5T_STD_I16LE</td>
+</tr>
+<tr>
+<td>long</td>
+<td>#H5T_NATIVE_LONG</td>
+<td>#H5T_STD_I32BE, #H5T_STD_I32LE,
+ #H5T_STD_I64BE or #H5T_STD_I64LE</td>
+</tr>
+<tr>
+<td>long long</td>
+<td>#H5T_NATIVE_LLONG</td>
+<td>#H5T_STD_I64BE or #H5T_STD_I64LE</td>
+</tr>
+<tr>
+<td>unsigned int</td>
+<td>#H5T_NATIVE_UINT</td>
+<td>#H5T_STD_U32BE or #H5T_STD_U32LE</td>
+</tr>
+<tr>
+<td>unsigned short</td>
+<td>#H5T_NATIVE_USHORT</td>
+<td>#H5T_STD_U16BE or #H5T_STD_U16LE</td>
+</tr>
+<tr>
+<td>unsigned long</td>
+<td>#H5T_NATIVE_ULONG</td>
+<td>#H5T_STD_U32BE, #H5T_STD_U32LE,
+ #H5T_STD_U64BE or #H5T_STD_U64LE</td>
+</tr>
+<tr>
+<td>unsigned long long</td>
+<td>#H5T_NATIVE_ULLONG</td>
+<td>#H5T_STD_U64BE or #H5T_STD_U64LE</td>
+</tr>
+<tr>
+<th colspan="3"><strong>Float</strong></th>
+</tr>
+<tr>
+<td>float</td>
+<td>#H5T_NATIVE_FLOAT</td>
+<td>#H5T_IEEE_F32BE or #H5T_IEEE_F32LE</td>
+</tr>
+<tr>
+<td>double</td>
+<td>#H5T_NATIVE_DOUBLE</td>
+<td>#H5T_IEEE_F64BE or #H5T_IEEE_F64LE</td>
+</tr>
+</table>
+
+<table>
+<caption>Some HDF5 pre-defined native datatypes and corresponding standard (file) type</caption>
+<tr>
+<th><strong>F90 Type</strong></th>
+<th><strong>HDF5 Memory Type</strong></th>
+<th><strong>HDF5 File Type*</strong></th>
+</tr>
+<tr>
+<td>integer</td>
+<td>H5T_NATIVE_INTEGER</td>
+<td>#H5T_STD_I32BE(8,16) or #H5T_STD_I32LE(8,16)</td>
+</tr>
+<tr>
+<td>real</td>
+<td>H5T_NATIVE_REAL</td>
+<td>#H5T_IEEE_F32BE or #H5T_IEEE_F32LE</td>
+</tr>
+<tr>
+<td>double-precision</td>
+<td>#H5T_NATIVE_DOUBLE</td>
+<td>#H5T_IEEE_F64BE or #H5T_IEEE_F64LE</td>
+</tr>
+</table>
+
+<table>
+<tr>
+<td>* Note that the HDF5 File Types listed are those that are most commonly created.
+ The file type created depends on the compiler switches and platforms being
+ used. For example, on the Cray an integer is 64-bit, and using #H5T_NATIVE_INT (C)
+ or H5T_NATIVE_INTEGER (F90) would result in an #H5T_STD_I64BE file type.</td>
+</tr>
+</table>
+
+The following code is an example of when you would use standard pre-defined datatypes vs. native types:
+\code
+ #include "hdf5.h"
+
+ main() {
+
+ hid_t file_id, dataset_id, dataspace_id;
+ herr_t status;
+ hsize_t dims[2]={4,6};
+ int i, j, dset_data[4][6];
+
+ for (i = 0; i < 4; i++)
+ for (j = 0; j < 6; j++)
+ dset_data[i][j] = i * 6 + j + 1;
+
+ file_id = H5Fcreate ("dtypes.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ dataspace_id = H5Screate_simple (2, dims, NULL);
+
+ dataset_id = H5Dcreate (file_id, "/dset", H5T_STD_I32BE, dataspace_id,
+ H5P_DEFAULT);
+
+ status = H5Dwrite (dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
+ H5P_DEFAULT, dset_data);
+
+ status = H5Dclose (dataset_id);
+
+ status = H5Fclose (file_id);
+ }
+\endcode
+By using the native types when reading and writing, the code that reads from or writes to a dataset
+can be the same for different platforms.
+
+Can native types also be used when creating a dataset? Yes. However, just be aware that the resulting
+datatype in the file will be one of the standard pre-defined types and may be different than expected.
+
+What happens if you do not use the correct native datatype for a standard (file) datatype? Your data
+may be incorrect or not what you expect.
+
+\section secLBDtypeDer Derived Datatypes
+ANY pre-defined datatype can be used to derive user-defined datatypes.
+
+To create a datatype derived from a pre-defined type:
+<ol>
+<li>Make a copy of the pre-defined datatype:
+\code
+ tid = H5Tcopy (H5T_STD_I32BE);
+\endcode
+</li>
+<li>Change the datatype.</li>
+</ol>
+
+There are numerous datatype functions that allow a user to alter a pre-defined datatype. See
+\ref subsecLBDtypeSpecStr below for a simple example.
+
+Refer to the \ref H5T in the \ref RM. Example functions are #H5Tset_size and #H5Tset_precision.
+
+\section secLBDtypeSpec Specific Datatypes
+On the <a href="https://portal.hdfgroup.org/display/HDF5/Examples+by+API">Examples by API</a>
+page under <a href="https://confluence.hdfgroup.org/display/HDF5/Examples+by+API#ExamplesbyAPI-datatypes">Datatypes</a>
+you will find many example programs for creating and reading datasets with different datatypes.
+
+Below is additional information on some of the datatypes. See
+the <a href="https://portal.hdfgroup.org/display/HDF5/Examples+by+API">Examples by API</a>
+page for examples of these datatypes.
+
+\subsection subsecLBDtypeSpec Array Datatype vs Array Dataspace
+#H5T_ARRAY is a datatype, and it should not be confused with the dataspace of a dataset. The dataspace
+of a dataset can consist of a regular array of elements. For example, the datatype for a dataset
+could be an atomic datatype like integer, and the dataset could be an N-dimensional appendable array,
+as specified by the dataspace. See #H5Screate and #H5Screate_simple for details.
+
+Unlimited dimensions and subsetting are not supported when using the #H5T_ARRAY datatype.
+
+The #H5T_ARRAY datatype was primarily created to address the simple case of a compound datatype
+when all members of the compound datatype are of the same type and there is no need to subset by
+compound datatype members. Creation of such a datatype is more efficient and I/O also requires
+less work, because there is no alignment involved.
+
+\subsection subsecLBDtypeSpecArr Array Datatype
+The array class of datatypes, #H5T_ARRAY, allows the construction of true, homogeneous,
+multi-dimensional arrays. Since these are homogeneous arrays, each element of the array
+will be of the same datatype, designated at the time the array is created.
+
+Users may be confused by this datatype, as opposed to a dataset with a simple atomic
+datatype (e.g. integer) that is an array. See \ref subsecLBDtypeSpec for more information.
+
+Arrays can be nested. Not only is an array datatype used as an element of an HDF5 dataset,
+but the elements of an array datatype may be of any datatype, including another array datatype.
+
+Array datatypes <strong>cannot be subdivided for I/O</strong>; the entire array must be transferred from one
+dataset to another.
+
+Within certain limitations, outlined in the next paragraph, array datatypes may be N-dimensional
+and of any dimension size. <strong>Unlimited dimensions, however, are not supported</strong>. Functionality similar
+to unlimited dimension arrays is available through the use of variable-length datatypes.
+
+The maximum number of dimensions, i.e., the maximum rank, of an array datatype is specified by
+the HDF5 library constant #H5S_MAX_RANK. The minimum rank is 1 (one). All dimension sizes must
+be greater than 0 (zero).
+
+One array datatype may only be converted to another array datatype if the number of dimensions
+and the sizes of the dimensions are equal and the datatype of the first array's elements can be
+converted to the datatype of the second array's elements.
+
+\subsubsection subsubsecLBDtypeSpecArrAPI Array Datatype APIs
+There are three functions that are specific to array datatypes: one, #H5Tarray_create, for creating
+an array datatype, and two, #H5Tget_array_ndims and #H5Tget_array_dims
+for working with existing array datatypes.
+
+<h4>Creating</h4>
+The function #H5Tarray_create creates a new array datatype object. Parameters specify
+\li the base datatype of each element of the array,
+\li the rank of the array, i.e., the number of dimensions,
+\li the size of each dimension, and
+\li the dimension permutation of the array, i.e., whether the elements of the array are listed in C or FORTRAN order.
+
+<h4>Working with existing array datatypes</h4>
+When working with existing arrays, one must first determine the rank, or number of dimensions, of the array.
+
+The function #H5Tget_array_ndims returns the rank of a specified array datatype.
+
+In many instances, one needs further information. The function #H5Tget_array_dims retrieves the
+permutation of the array and the size of each dimension.
+
+\subsection subsecLBDtypeSpecCmpd Compound
+
+\subsubsection subsubsecLBDtypeSpecCmpdProp Properties of compound datatypes
+A compound datatype is similar to a struct in C or a common block in Fortran. It is a collection of
+one or more atomic types or small arrays of such types. To create and use a compound datatype,
+you need to refer to various properties of the compound datatype:
+\li It is of class compound.
+\li It has a fixed total size, in bytes.
+\li It consists of zero or more members (defined in any order) with unique names and which occupy non-overlapping regions within the datum.
+\li Each member has its own datatype.
+\li Each member is referenced by an index number between zero and N-1, where N is the number of members in the compound datatype.
+\li Each member has a name which is unique among its siblings in a compound datatype.
+\li Each member has a fixed byte offset, which is the first byte (smallest byte address) of that member in a compound datatype.
+\li Each member can be a small array of up to four dimensions.
+
+Properties of members of a compound datatype are defined when the member is added to the compound type and cannot be subsequently modified.
+
+\subsubsection subsubsecLBDtypeSpecCmpdDef Defining compound datatypes
+Compound datatypes must be built out of other datatypes. First, one creates an empty compound
+datatype and specifies its total size. Then members are added to the compound datatype in any order.
+
+Member names. Each member must have a descriptive name, which is the key used to uniquely identify
+the member within the compound datatype. A member name in an HDF5 datatype does not necessarily
+have to be the same as the name of the corresponding member in the C struct in memory, although
+this is often the case. Nor does one need to define all members of the C struct in the HDF5
+compound datatype (or vice versa).
+
+Offsets. Usually a C struct will be defined to hold a data point in memory, and the offsets of the
+members in memory will be the offsets of the struct members from the beginning of an instance of the
+struct. The library defines the macro to compute the offset of a member within a struct:
+\code
+ HOFFSET(s,m)
+\endcode
+This macro computes the offset of member m within a struct variable s.
+
+Here is an example in which a compound datatype is created to describe complex numbers whose type
+is defined by the complex_t struct.
+\code
+typedef struct {
+ double re; /*real part */
+ double im; /*imaginary part */
+} complex_t;
+
+complex_t tmp; /*used only to compute offsets */
+hid_t complex_id = H5Tcreate (H5T_COMPOUND, sizeof tmp);
+H5Tinsert (complex_id, "real", HOFFSET(tmp,re), H5T_NATIVE_DOUBLE);
+H5Tinsert (complex_id, "imaginary", HOFFSET(tmp,im), H5T_NATIVE_DOUBLE);
+\endcode
+
+\subsection subsecLBDtypeSpecRef Reference
+There are two types of Reference datatypes in HDF5:
+\li \ref subsubsecLBDtypeSpecRefObj
+\li \ref subsubsecLBDtypeSpecRefDset
+
+\subsubsection subsubsecLBDtypeSpecRefObj Reference to objects
+In HDF5, objects (i.e. groups, datasets, and named datatypes) are usually accessed by name.
+There is another way to access stored objects -- by reference.
+
+An object reference is based on the relative file address of the object header in the file
+and is constant for the life of the object. Once a reference to an object is created and
+stored in a dataset in the file, it can be used to dereference the object it points to.
+References are handy for creating a file index or for grouping related objects by storing
+references to them in one dataset.
+
+<h4>Creating and storing references to objects</h4>
+The following steps are involved in creating and storing file references to objects:
+<ol>
+<li>Create the objects or open them if they already exist in the file.</li>
+<li>Create a dataset to store the objects' references, by specifying #H5T_STD_REF_OBJ as the datatype</li>
+<li>Create and store references to the objects in a buffer, using #H5Rcreate.</li>
+<li>Write a buffer with the references to the dataset, using #H5Dwrite with the #H5T_STD_REF_OBJ datatype.</li>
+</ol>
+
+<h4>Reading references and accessing objects using references</h4>
+The following steps are involved:
+<ol>
+<li>Open the dataset with the references and read them. The #H5T_STD_REF_OBJ datatype must be used to describe the memory datatype.</li>
+<li>Use the read reference to obtain the identifier of the object the reference points to using #H5Rdereference.</li>
+<li>Open the dereferenced object and perform the desired operations.</li>
+<li>Close all objects when the task is complete.</li>
+</ol>
+
+\subsubsection subsubsecLBDtypeSpecRefDset Reference to a dataset region
+A dataset region reference points to a dataset selection in another dataset.
+A reference to the dataset selection (region) is constant for the life of the dataset.
+
+<h4>Creating and storing references to dataset regions</h4>
+The following steps are involved in creating and storing references to a dataset region:
+\li Create a dataset to store the dataset region (selection), by passing in #H5T_STD_REF_DSETREG for the datatype when calling #H5Dcreate.
+\li Create selection(s) in existing dataset(s) using #H5Sselect_hyperslab and/or #H5Sselect_elements.
+\li Create reference(s) to the selection(s) using #H5Rcreate and store them in a buffer.
+\li Write the references to the dataset regions in the file.
+\li Close all objects.
+
+<h4>Reading references to dataset regions</h4>
+The following steps are involved in reading references to dataset regions and referenced dataset regions (selections).
+<ol>
+<li>Open and read the dataset containing references to the dataset regions.
+The datatype #H5T_STD_REF_DSETREG must be used during read operation.</li>
+<li>Use #H5Rdereference to obtain the dataset identifier from the read dataset region reference.
+ OR
+ Use #H5Rget_region to obtain the dataspace identifier for the dataset containing the selection from the read dataset region reference.
+</li>
+<li>With the dataspace identifier, the \ref H5S interface functions, H5Sget_select_*,
+can be used to obtain information about the selection.</li>
+<li>Close all objects when they are no longer needed.</li>
+</ol>
+
+The dataset with the region references was read by #H5Dread with the #H5T_STD_REF_DSETREG datatype specified.
+
+The read reference can be used to obtain the dataset identifier by calling #H5Rdereference or to obtain
+spatial information (dataspace and selection) with the call to #H5Rget_region.
+
+The reference to the dataset region has information for both the dataset itself and its selection. In both functions:
+\li The first parameter is an identifier of the dataset with the region references.
+\li The second parameter specifies the type of reference stored. In this example, a reference to the dataset region is stored.
+\li The third parameter is a buffer containing the reference of the specified type.
+
+This example introduces several H5Sget_select_* functions used to obtain information about selections:
+<table>
+<caption>H5Sget_select_* functions for obtaining information about a selection</caption>
+<tr>
+<th><strong>Function</strong></th>
+<th><strong>Description</strong></th>
+</tr>
+<tr>
+<td>#H5Sget_select_npoints</td>
+<td>Returns the number of elements in the hyperslab</td>
+</tr>
+<tr>
+<td>#H5Sget_select_hyper_nblocks</td>
+<td>Returns the number of blocks in the hyperslab</td>
+</tr>
+<tr>
+<td>#H5Sget_select_hyper_blocklist</td>
+<td>Returns the "lower left" and "upper right" coordinates of the blocks in the hyperslab selection</td>
+</tr>
+<tr>
+<td>#H5Sget_select_bounds</td>
+<td>Returns the coordinates of the "minimal" block containing a hyperslab selection</td>
+</tr>
+<tr>
+<td>#H5Sget_select_elem_npoints</td>
+<td>Returns the number of points in the element selection</td>
+</tr>
+<tr>
+<td>#H5Sget_select_elem_pointlist</td>
+<td>Returns the coordinates of points in the element selection</td>
+</tr>
+</table>
+
+\subsection subsecLBDtypeSpecStr String
+A simple example of creating a derived datatype is using the string datatype,
+#H5T_C_S1 (#H5T_FORTRAN_S1) to create strings of more than one character. Strings
+can be stored as either fixed or variable length, and may have different rules
+for padding of unused storage.
+
+\subsubsection subsecLBDtypeSpecStrFix Fixed Length 5-character String Datatype
+\code
+ hid_t strtype; /* Datatype ID */
+ herr_t status;
+
+ strtype = H5Tcopy (H5T_C_S1);
+ status = H5Tset_size (strtype, 5); /* create string of length 5 */
+\endcode
+
+\subsubsection subsecLBDtypeSpecStrVar Variable Length String Datatype
+\code
+ strtype = H5Tcopy (H5T_C_S1);
+ status = H5Tset_size (strtype, H5T_VARIABLE);
+\endcode
+
+The ability to derive datatypes from pre-defined types allows users to create any number of datatypes,
+from simple to very complex.
+
+As the term implies, variable length strings are strings of varying lengths. They are stored internally
+in a heap, potentially impacting efficiency in the following ways:
+\li Heap storage requires more space than regular raw data storage.
+\li Heap access generally reduces I/O efficiency because it requires individual read or write operations
+for each data element rather than one read or write per dataset or per data selection.
+\li A variable length dataset consists of pointers to the heaps of data, not the actual data. Chunking
+and filters, including compression, are not available for heaps.
+
+See \ref subsubsec_datatype_other_strings in the \ref UG, for more information on how fixed and variable
+length strings are stored.
+
+\subsection subsecLBDtypeSpecVL Variable Length
+Variable-length (VL) datatypes are sequences of an existing datatype (atomic, VL, or compound)
+which are not fixed in length from one dataset location to another. In essence, they are similar
+to C character strings -- a sequence of a type which is pointed to by a particular type of
+pointer -- although they are implemented more closely to FORTRAN strings by including an explicit
+length in the pointer instead of using a particular value to terminate the sequence.
+
+VL datatypes are useful to the scientific community in many different ways, some of which are listed below:
+<ul>
+<li>Ragged arrays: Multi-dimensional ragged arrays can be implemented with the last (fastest changing)
+dimension being ragged by using a VL datatype as the type of the element stored. (Or as a field in a compound datatype.)
+</li>
+<li>Fractal arrays: If a compound datatype has a VL field of another compound type with VL fields
+(a nested VL datatype), this can be used to implement ragged arrays of ragged arrays, to whatever
+nesting depth is required for the user.
+</li>
+<li>Polygon lists: A common storage requirement is to efficiently store arrays of polygons with
+different numbers of vertices. VL datatypes can be used to efficiently and succinctly describe an
+array of polygons with different numbers of vertices.
+</li>
+<li>Character strings: Perhaps the most common use of VL datatypes will be to store C-like VL character
+strings in dataset elements or as attributes of objects.
+</li>
+<li>Indices: An array of VL object references could be used as an index to all the objects in a file
+which contain a particular sequence of dataset values. Perhaps an array something like the following:
+\code
+ Value1: Object1, Object3, Object9
+ Value2: Object0, Object12, Object14, Object21, Object22
+ Value3: Object2
+ Value4: <none>
+ Value5: Object1, Object10, Object12
+ .
+ .
+\endcode
+</li>
+<li>Object Tracking: An array of VL dataset region references can be used as a method of tracking
+objects or features appearing in a sequence of datasets. Perhaps an array of them would look like:
+\code
+ Feature1: Dataset1:Region, Dataset3:Region, Dataset9:Region
+ Feature2: Dataset0:Region, Dataset12:Region, Dataset14:Region,
+ Dataset21:Region, Dataset22:Region
+ Feature3: Dataset2:Region
+ Feature4: <none>
+ Feature5: Dataset1:Region, Dataset10:Region, Dataset12:Region
+ .
+ .
+\endcode
+</li>
+</ul>
+
+\subsubsection subsubsecLBDtypeSpecVLMem Variable-length datatype memory management
+With each element possibly being of different sequence lengths for a dataset with a VL datatype,
+the memory for the VL datatype must be dynamically allocated. Currently there are two methods
+of managing the memory for VL datatypes: the standard C malloc/free memory allocation routines
+or a method of calling user-defined memory management routines to allocate or free memory. Since
+the memory allocated when reading (or writing) may be complicated to release, an HDF5 routine is
+provided to traverse a memory buffer and free the VL datatype information without leaking memory.
+
+\subsubsection subsubsecLBDtypeSpecVLDiv Variable-length datatypes cannot be divided
+VL datatypes are designed so that they cannot be subdivided by the library with selections, etc.
+This design was chosen due to the complexities in specifying selections on each VL element of a
+dataset through a selection API that is easy to understand. Also, the selection APIs work on
+dataspaces, not on datatypes. At some point in time, we may want to create a way for dataspaces
+to have VL components to them and we would need to allow selections of those VL regions, but
+that is beyond the scope of this document.
+
+\subsubsection subsubsecLBDtypeSpecVLErr What happens if the library runs out of memory while reading?
+It is possible for a call to #H5Dread to fail while reading in VL datatype information if the memory
+required exceeds that which is available. In this case, the #H5Dread call will fail gracefully and any
+VL data which has been allocated prior to the memory shortage will be returned to the system via the
+memory management routines detailed below. It may be possible to design a partial read API function
+at a later date, if demand for such a function warrants.
+
+\subsubsection subsubsecLBDtypeSpecVLStr Strings as variable-length datatypes
+Since character strings are a special case of VL data that is implemented in many different ways on
+different machines and in different programming languages, they are handled somewhat differently from
+other VL datatypes in HDF5.
+
+HDF5 has native VL strings for each language API, which are stored the same way on disk, but are
+exported through each language API in a natural way for that language. When retrieving VL strings
+from a dataset, users may choose to have them stored in memory as a native VL string or in HDF5's
+#hvl_t struct for VL datatypes.
+
+VL strings may be created in one of two ways: by creating a VL datatype with a base type of
+#H5T_C_S1 (using #H5Tvlen_create), or by creating a string datatype and setting its length to #H5T_VARIABLE. The second method is used to access native VL strings in memory. The
+library will convert between the two types, but they are stored on disk using different datatypes
+and have different memory representations.
+
+Multi-byte character representations, such as \em UNICODE or \em wide characters in C/C++, will need the
+appropriate character and string datatypes created so that they can be described properly through
+the datatype API. Additional conversions between these types and the current ASCII characters
+will also be required.
+
+Variable-width character strings (which might be compressed data or some other encoding) are not
+currently handled by this design. We will evaluate how to implement them based on user feedback.
+
+\subsubsection subsubsecLBDtypeSpecVLAPIs Variable-length datatype APIs
+
+<h4>Creation</h4>
+VL datatypes are created with the #H5Tvlen_create function as follows:
+\code
+type_id = H5Tvlen_create(hid_t base_type_id);
+\endcode
+The base datatype will be the datatype that the sequence is composed of, characters for character
+strings, vertex coordinates for polygon lists, etc. The base datatype specified for the VL datatype
+can be of any HDF5 datatype, including another VL datatype, a compound datatype, or an atomic datatype.
+
+<h4>Querying base datatype of VL datatype</h4>
+It may be necessary to know the base datatype of a VL datatype before memory is allocated, etc.
+The base datatype is queried with the #H5Tget_super function, described in the \ref H5T documentation.
+
+<h4>Querying minimum memory required for VL information</h4>
+In order to predict the memory usage that #H5Dread may need to allocate to store VL data while
+reading the data, the #H5Dvlen_get_buf_size function is provided:
+\code
+herr_t H5Dvlen_get_buf_size(hid_t dataset_id, hid_t type_id, hid_t space_id, hsize_t *size)
+\endcode
+This routine checks the number of bytes required to store the VL data from the dataset, using
+the \em space_id for the selection in the dataset on disk and the \em type_id for the memory representation
+of the VL data in memory. The *\em size value is modified according to how many bytes are required
+to store the VL data in memory.
+
+<h4>Specifying how to manage memory for the VL datatype</h4>
+The memory management method is determined by dataset transfer properties passed into the
+#H5Dread and #H5Dwrite functions with the dataset transfer property list.
+
+Default memory management is set by using #H5P_DEFAULT for the dataset transfer
+property list identifier. If #H5P_DEFAULT is used with #H5Dread, the system \em malloc and \em free
+calls will be used for allocating and freeing memory. In such a case, #H5P_DEFAULT should
+also be passed as the property list identifier to #H5Dvlen_reclaim.
+
+The rest of this subsection is relevant only to those who choose not to use default memory management.
+
+The user can choose whether to use the system \em malloc and \em free calls or user-defined, or custom,
+memory management functions. If user-defined memory management functions are to be used, the
+memory allocation and free routines must be defined via #H5Pset_vlen_mem_manager(), as follows:
+\code
+herr_t H5Pset_vlen_mem_manager(hid_t plist_id, H5MM_allocate_t alloc, void *alloc_info, H5MM_free_t free, void *free_info)
+\endcode
+The \em alloc and \em free parameters identify the memory management routines to be used. If the user
+has defined custom memory management routines, \em alloc and/or \em free should be set to make those
+routine calls (i.e., the name of the routine is used as the value of the parameter); if the user
+prefers to use the system's \em malloc and/or \em free, the \em alloc and \em free parameters, respectively, should be set to \em NULL.
+
+The prototypes for the user-defined functions would appear as follows:
+\code
+typedef void *(*H5MM_allocate_t)(size_t size, void *alloc_info);
+typedef void (*H5MM_free_t)(void *mem, void *free_info);
+\endcode
+The \em alloc_info and \em free_info parameters can be used to pass along any required information to
+the user's memory management routines.
+
+In summary, if the user has defined custom memory management routines, the name(s) of the routines
+are passed in the \em alloc and \em free parameters and the custom routines' parameters are passed in the
+\em alloc_info and \em free_info parameters. If the user wishes to use the system \em malloc and \em free functions,
+the \em alloc and/or \em free parameters are set to \em NULL and the \em alloc_info and \em free_info parameters are ignored.
+
+<h4>Recovering memory from VL buffers read in</h4>
+The complex memory buffers created for a VL datatype may be reclaimed with the #H5Dvlen_reclaim
+function call, as follows:
+\code
+herr_t H5Dvlen_reclaim(hid_t type_id, hid_t space_id, hid_t plist_id, void *buf);
+\endcode
+
+The \em type_id must be the datatype stored in the buffer, \em space_id describes the selection for the
+memory buffer to free the VL datatypes within, \em plist_id is the dataset transfer property list
+which was used for the I/O transfer to create the buffer, and \em buf is the pointer to the buffer
+to free the VL memory within. The VL structures (#hvl_t) in the user's buffer are modified to zero
+out the VL information after it has been freed.
+
+If nested VL datatypes were used to create the buffer, this routine frees them from the bottom up,
+releasing all the memory without creating memory leaks.
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+*/
diff --git a/doxygen/dox/LearnBasics3.dox b/doxygen/dox/LearnBasics3.dox
new file mode 100644
index 0000000..2fe0f52
--- /dev/null
+++ b/doxygen/dox/LearnBasics3.dox
@@ -0,0 +1,1015 @@
+/** @page LBPropsList Property Lists Basics
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\section secLBPList What is a Property (or Property List)?
+In HDF5, a property or property list is a characteristic or feature associated with an HDF5 object.
+There are default properties which handle the most common needs. These default properties are
+specified by passing in #H5P_DEFAULT for the Property List parameter of a function. Default properties
+can be modified by use of the \ref H5P interface and function parameters.
+
+The \ref H5P API allows a user to take advantage of the more powerful features in HDF5. It typically
+supports unusual cases when creating or accessing HDF5 objects. There is a programming model for
+working with Property Lists in HDF5 (see below).
+
+For examples of modifying a property list, see these tutorial topics:
+\li \see \ref LBDsetLayout
+\li \see \ref LBExtDset
+\li \see \ref LBComDset
+
+There are many Property Lists associated with creating and accessing objects in HDF5. See the
+\ref H5P Interface documentation in the HDF5 \ref RM for a list of the different
+properties associated with HDF5 interfaces.
+
+In summary:
+\li Properties are features of HDF5 objects, that can be changed by use of the Property List API and function parameters.
+\li Property lists provide a mechanism for adding functionality to HDF5 calls without increasing the number of arguments used for a given call.
+\li The Property List API supports unusual cases when creating and accessing HDF5 objects.
+
+\section secLBPListProg Programming Model
+Default properties are specified by simply passing in #H5P_DEFAULT (C) / H5P_DEFAULT_F (F90) for
+the property list parameter in those functions for which properties can be changed.
+
+The programming model for changing a property list is as follows:
+\li Create a copy or "instance" of the desired pre-defined property type, using the #H5Pcreate call. This
+will return a property list identifier. Please see the \ref RM entry for #H5Pcreate, for a comprehensive
+list of the property types.
+\li With the property list identifier, modify the property, using the \ref H5P APIs.
+\li Modify the object feature, by passing the property list identifier into the corresponding HDF5 object function.
+\li Close the property list when done, using #H5Pclose.
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBDsetLayout Dataset Storage Layout
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\section secLBDsetLayoutDesc Description of a Dataset
+
+\section secLBDsetLayout Dataset Storage Layout
+The storage information, or storage layout, defines how the raw data values in the dataset are
+physically stored on disk. There are three ways that a dataset can be stored:
+\li contiguous
+\li chunked
+\li compact
+
+See the #H5Pset_layout/#H5Pget_layout APIs for details.
+
+\subsection subsecLBDsetLayoutCont Contiguous
+If the storage layout is contiguous, then the raw data values will be stored physically adjacent
+to each other in the HDF5 file (in one contiguous block). This is the default layout for a dataset.
+In other words, if you do not explicitly change the storage layout for the dataset, then it will
+be stored contiguously.
+<table>
+<tr>
+<td>
+\image html tutr-locons.png
+</td>
+</tr>
+</table>
+
+\subsection subsecLBDsetLayoutChunk Chunked
+With a chunked storage layout the data is stored in equal-sized blocks or chunks of
+a pre-defined size. The HDF5 library always writes and reads the entire chunk:
+<table>
+<tr>
+<td>
+\image html tutr-lochk.png
+</td>
+</tr>
+</table>
+
+Each chunk is stored as a separate contiguous block in the HDF5 file. There is a chunk index
+which keeps track of the chunks associated with a dataset:
+<table>
+<tr>
+<td>
+\image html tutr-lochks.png
+</td>
+</tr>
+</table>
+
+
+\subsubsection susubsecLBDsetLayoutChunkWhy Why Chunking ?
+Chunking is required for enabling compression and other filters, as well as for creating extendible
+or unlimited dimension datasets.
+
+It is also commonly used when subsetting very large datasets. Using the chunking layout can
+greatly improve performance when subsetting large datasets, because only the chunks required
+will need to be accessed. However, it is easy to use chunking without considering the consequences
+of the chunk size, which can lead to strikingly poor performance.
+
+Note that a chunk always has the same rank as the dataset and the chunk's dimensions do not need
+to be factors of the dataset dimensions.
+
+Writing or reading a chunked dataset is transparent to the application. You would use the same
+set of operations that you would use for a contiguous dataset. For example:
+\code
+ H5Dopen (...);
+ H5Sselect_hyperslab (...);
+ H5Dread (...);
+\endcode
+
+\subsubsection susubsecLBDsetLayoutChunkProb Problems Using Chunking
+Issues that can cause performance problems with chunking include:
+\li Chunks are too small.
+If a very small chunk size is specified for a dataset it can cause the dataset to be excessively
+large and it can result in degraded performance when accessing the dataset. The smaller the chunk
+size the more chunks that HDF5 has to keep track of, and the more time it will take to search for a chunk.
+\li Chunks are too large.
+An entire chunk has to be read and uncompressed before performing an operation. There can be a
+performance penalty for reading a small subset, if the chunk size is substantially larger than
+the subset. Also, a dataset may be larger than expected if there are chunks that only contain a
+small amount of data.
+\li A chunk does not fit in the Chunk Cache.
+Every chunked dataset has a chunk cache associated with it that has a default size of 1 MB. The
+purpose of the chunk cache is to improve performance by keeping chunks that are accessed frequently
+in memory so that they do not have to be accessed from disk. If a chunk is too large to fit in the
+chunk cache, it can significantly degrade performance. However, the size of the chunk cache can be
+increased by calling #H5Pset_chunk_cache.
+
+It is a good idea to:
+\li Avoid very small chunk sizes, and be aware of the 1 MB chunk cache size default.
+\li Test the data with different chunk sizes to determine the optimal chunk size to use.
+\li Consider the chunk size in terms of the most common access patterns that will be used once the dataset has been created.
+
+\subsection subsecLBDsetLayoutCom Compact
+A compact dataset is one in which the raw data is stored in the object header of the dataset.
+This layout is for very small datasets that can easily fit in the object header.
+
+The compact layout can improve storage and access performance for files that have many very tiny
+datasets. With one I/O access both the header and data values can be read. The compact layout reduces
+the size of a file, as the data is stored with the header which will always be allocated for a dataset.
+However, the object header is 64 KB in size, so this layout can only be used for very small datasets.
+
+\section secLBDsetLayoutProg Programming Model to Modify the Storage Layout
+To modify the storage layout, the following steps must be done:
+\li Create a Dataset Creation Property list. (See #H5Pcreate)
+\li Modify the property list.
+To use chunked storage layout, call: #H5Pset_chunk
+To use the compact storage layout, call: #H5Pset_layout
+\li Create a dataset with the modified property list. (See #H5Dcreate)
+\li Close the property list. (See #H5Pclose)
+For example code, see the \ref HDF5Examples page.
+Specifically look at the <a href="https://portal.hdfgroup.org/display/HDF5/Examples+by+API">Examples by API</a>.
+There are examples for different languages.
+
+The C example to create a chunked dataset is:
+<a href="https://support.hdfgroup.org/ftp/HDF5/examples/examples-by-api/hdf5-examples/1_8/C/H5D/h5ex_d_chunk.c">h5ex_d_chunk.c</a>
+The C example to create a compact dataset is:
+<a href="https://support.hdfgroup.org/ftp/HDF5/examples/examples-by-api/hdf5-examples/1_8/C/H5D/h5ex_d_compact.c">h5ex_d_compact.c</a>
+
+\section secLBDsetLayoutChange Changing the Layout after Dataset Creation
+The dataset layout is a Dataset Creation Property List. This means that once the dataset has been
+created the dataset layout cannot be changed. The h5repack utility can be used to write a file
+to a new file with a new layout.
+
+\section secLBDsetLayoutSource Sources of Information
+<a href="https://confluence.hdfgroup.org/display/HDF5/Chunking+in+HDF5">Chunking in HDF5</a>
+(See the documentation on <a href="https://confluence.hdfgroup.org/display/HDF5/Advanced+Topics+in+HDF5">Advanced Topics in HDF5</a>)
+\see \ref sec_plist in the HDF5 \ref UG.
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+
+@page LBExtDset Extendible Datasets
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\section secLBExtDsetCreate Creating an Extendible Dataset
+An extendible dataset is one whose dimensions can grow. HDF5 allows you to define a dataset to have
+certain initial dimensions, then to later increase the size of any of the initial dimensions.
+
+HDF5 requires you to use chunking to define extendible datasets. This makes it possible to extend
+datasets efficiently without having to excessively reorganize storage. (To use chunking efficiently,
+be sure to see the advanced topic, <a href="https://confluence.hdfgroup.org/display/HDF5/Chunking+in+HDF5">Chunking in HDF5</a>.)
+
+The following operations are required in order to extend a dataset:
+\li Declare the dataspace of the dataset to have unlimited dimensions for all dimensions that might eventually be extended.
+\li Set dataset creation properties to enable chunking.
+\li Create the dataset.
+\li Extend the size of the dataset.
+
+\section secLBExtDsetProg Programming Example
+
+\subsection subsecLBExtDsetProgDesc Description
+See \ref LBExamples for the examples used in the \ref LearnBasics tutorial.
+
+The example shows how to create a 3 x 3 extendible dataset, write to that dataset, extend the dataset
+to 10x3, and write to the dataset again.
+
+For details on compiling an HDF5 application:
+[ \ref LBCompiling ]
+
+\subsection subsecLBExtDsetProgRem Remarks
+\li An unlimited dimension dataspace is specified with the #H5Screate_simple call, by passing in
+#H5S_UNLIMITED as an element of the maxdims array.
+\li The #H5Pcreate call creates a new property as an instance of a property list class. For creating
+an extendible array dataset, pass in #H5P_DATASET_CREATE for the property list class.
+\li The #H5Pset_chunk call modifies a Dataset Creation Property List instance to store a chunked
+layout dataset and sets the size of the chunks used.
+\li To extend an unlimited dimension dataset use the #H5Dset_extent call. Please be aware that
+after this call, the dataset's dataspace must be refreshed with #H5Dget_space before more data can be accessed.
+\li The #H5Pget_chunk call retrieves the size of chunks for the raw data of a chunked layout dataset.
+\li Once there is no longer a need for a Property List instance, it should be closed with the #H5Pclose call.
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBComDset Compressed Datasets
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\section secLBComDsetCreate Creating a Compressed Dataset
+HDF5 requires you to use chunking to create a compressed dataset. (To use chunking efficiently,
+be sure to see the advanced topic, <a href="https://confluence.hdfgroup.org/display/HDF5/Chunking+in+HDF5">Chunking in HDF5</a>.)
+
+The following operations are required in order to create a compressed dataset:
+\li Create a dataset creation property list.
+\li Modify the dataset creation property list instance to enable chunking and to enable compression.
+\li Create the dataset.
+\li Close the dataset creation property list and dataset.
+
+For more information on compression, see the FAQ question on <a href="https://confluence.hdfgroup.org/display/HDF5/Using+Compression+in+HDF5">Using Compression in HDF5</a>.
+
+\section secLBComDsetProg Programming Example
+
+\subsection subsecLBComDsetProgDesc Description
+See \ref LBExamples for the examples used in the \ref LearnBasics tutorial.
+
+The example creates a chunked and ZLIB compressed dataset. It also includes comments for what needs
+to be done to create an SZIP compressed dataset. The example then reopens the dataset, prints the
+filter information, and reads the dataset.
+
+For details on compiling an HDF5 application:
+[ \ref LBCompiling ]
+
+\subsection subsecLBComDsetProgRem Remarks
+\li The #H5Pset_chunk call modifies a Dataset Creation Property List instance to store a chunked layout
+dataset and sets the size of the chunks used.
+\li The #H5Pset_deflate call modifies the Dataset Creation Property List instance to use ZLIB or DEFLATE
+compression. The #H5Pset_szip call modifies it to use SZIP compression. There are different compression
+parameters required for each compression method.
+\li SZIP compression can only be used with atomic datatypes that are integer, float, or char. It cannot be
+applied to compound, array, variable-length, enumerations, or other user-defined datatypes. The call
+to #H5Dcreate will fail if attempting to create an SZIP compressed dataset with a non-allowed datatype.
+The conflict can only be detected when the property list is used.
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBContents Discovering the Contents of an HDF5 File
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\section secLBContents Discovering what is in an HDF5 file
+HDFView and h5dump are standalone tools which cannot be called within an application, and using
+#H5Dopen and #H5Dread require that you know the name of the HDF5 dataset. How would an application
+that has no prior knowledge of an HDF5 file be able to determine or discover the contents of it,
+much like HDFView and h5dump?
+
+The answer is that there are ways to discover the contents of an HDF5 file, by using the
+\ref H5G, \ref H5L and \ref H5O APIs:
+\li The \ref H5G interface (covered earlier) consists of routines for working with groups. A group is
+a structure that can be used to organize zero or more HDF5 objects, not unlike a Unix directory.
+\li The \ref H5L interface consists of link routines. A link is a path between groups. The \ref H5L interface
+allows objects to be accessed by use of these links.
+\li The \ref H5O interface consists of routines for working with objects. Datasets, groups, and committed
+datatypes are all objects in HDF5.
+
+Interface routines that simplify the process:
+\li #H5Literate traverses the links in a specified group, in the order of the specified index, using a
+user-defined callback routine. (A callback function is one that will be called when a certain condition
+is met, at a certain point in the future.)
+\li #H5Ovisit / #H5Lvisit recursively visit all objects/links accessible from a specified object/group.
+
+
+\section secLBContentsProg Programming Example
+
+\subsection subsecLBContentsProgUsing Using #H5Literate, #H5Lvisit and #H5Ovisit
+For example code, see the \ref HDF5Examples page.
+Specifically look at the <a href="https://portal.hdfgroup.org/display/HDF5/Examples+by+API">Examples by API</a>.
+There are examples for different languages, where examples of using #H5Literate and #H5Ovisit/#H5Lvisit are included.
+
+The h5ex_g_traverse example traverses a file using H5Literate:
+\li C: <a href="https://support.hdfgroup.org/ftp/HDF5/examples/examples-by-api/hdf5-examples/1_8/C/H5G/h5ex_g_traverse.c">h5ex_g_traverse.c</a>
+\li F90: <a href="https://support.hdfgroup.org/ftp/HDF5/examples/examples-by-api/hdf5-examples/1_8/FORTRAN/H5G/h5ex_g_traverse_F03.f90">h5ex_g_traverse_F03.f90</a>
+
+The h5ex_g_visit example traverses a file using H5Ovisit and H5Lvisit:
+\li C: <a href="https://support.hdfgroup.org/ftp/HDF5/examples/examples-by-api/hdf5-examples/1_8/C/H5G/h5ex_g_visit.c">h5ex_g_visit.c</a>
+\li F90: <a href="https://support.hdfgroup.org/ftp/HDF5/examples/examples-by-api/hdf5-examples/1_8/FORTRAN/H5G/h5ex_g_visit_F03.f90">h5ex_g_visit_F03.f90</a>
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBQuiz Learning the basics QUIZ
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\ref LBFileOrg
+<ol>
+<li>Name and describe the two primary objects that can be stored in an HDF5 file.
+</li>
+<li>What is an attribute?
+</li>
+<li>Give the path name for an object called <code style="background-color:whitesmoke;">harry</code> that is a member of a group called <code style="background-color:whitesmoke;">dick</code>, which, in turn, is a member of the root group.
+</li>
+</ol>
+
+\ref LBAPI
+<ol>
+<li>Describe the purpose of each of the following HDF5 APIs:
+\code
+ H5A, H5D, H5E, H5F, H5G, H5T, H5Z
+\endcode
+</li>
+</ol>
+
+\ref LBFileCreate
+<ol>
+<li>What two HDF5 routines must be called to create an HDF5 file?
+</li>
+<li>What include file must be included in any file that uses the HDF5 library?
+</li>
+<li>An HDF5 file is never completely empty because as soon as it is created, it automatically contains a certain primary object. What is that object?
+</li>
+</ol>
+
+\ref LBDsetCreate
+<ol>
+<li>Name and describe two major datatype categories.
+</li>
+<li>List the HDF5 atomic datatypes. Give an example of a predefined datatype. How would you create a string dataset?
+</li>
+<li>What does the dataspace describe? What are the major characteristics of the simple dataspace?
+</li>
+<li>What information needs to be passed to the #H5Dcreate function, i.e., what information is needed to describe a dataset at creation time?
+</li>
+</ol>
+
+
+\ref LBDsetRW
+<ol>
+<li>What are six pieces of information which need to be specified for reading and writing a dataset?
+</li>
+<li>Why are both the memory dataspace and file dataspace needed for read/write operations, while only the memory datatype is required?
+</li>
+<li>In Figure 6.1, what does this line mean?
+\code
+DATASPACE { SIMPLE (4 , 6 ) / ( 4 , 6 ) }
+\endcode
+</li>
+</ol>
+
+
+\ref LBAttrCreate
+<ol>
+<li>What is an attribute?
+</li>
+<li>Can partial I/O operations be performed on attributes?
+</li>
+</ol>
+
+
+\ref LBGrpCreate
+<ol>
+<li>What are the two primary objects that can be included in a group?
+</li>
+</ol>
+
+
+\ref LBGrpCreateNames
+<ol>
+<li>Group names can be specified in two ways. What are these two types of group names?
+</li>
+<li>You have a dataset named <code style="background-color:whitesmoke;">moo</code> in the group <code style="background-color:whitesmoke;">boo</code>, which is in the group <code style="background-color:whitesmoke;">foo</code>, which, in turn,
+is in the <code style="background-color:whitesmoke;">root</code> group. How would you specify an absolute name to access this dataset?
+</li>
+</ol>
+
+
+\ref LBGrpDset
+<ol>
+<li>Describe a way to access the dataset moo described in the previous section
+(question 2) using a relative name. Describe a way to access the same dataset using an absolute name.
+</li>
+</ol>
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBQuizAnswers Learning the basics QUIZ with Answers
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\ref LBFileOrg
+<ol>
+<li>Name and describe the two primary objects that can be stored in an HDF5 file.
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>Group: A grouping structure containing zero or more HDF5 objects, together with supporting metadata.<br />
+Dataset: A multidimensional array of data elements, together with supporting metadata.
+</td>
+</tr>
+</table>
+</li>
+<li>What is an attribute?
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>An HDF5 attribute is a user-defined HDF5 structure that provides extra information about an HDF5 object.
+</td>
+</tr>
+</table>
+</li>
+<li>Give the path name for an object called <code style="background-color:whitesmoke;">harry</code> that is a member of a group called <code style="background-color:whitesmoke;">dick</code>, which, in turn, is a member of the root group.
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>/dick/harry
+</td>
+</tr>
+</table>
+</li>
+</ol>
+
+\ref LBAPI
+<ol>
+<li>Describe the purpose of each of the following HDF5 APIs:
+\code
+ H5A, H5D, H5E, H5F, H5G, H5T, H5Z
+\endcode
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>H5A: Attribute access and manipulation routines
+<br />
+H5D: Dataset access and manipulation routines
+<br />
+H5E: Error handling routines
+<br />
+H5F: File access routines
+<br />
+H5G: Routines for creating and operating on groups
+<br />
+H5T: Routines for creating and manipulating the datatypes of dataset elements
+<br />
+H5Z: Data compression routines
+</td>
+</tr>
+</table>
+</li>
+</ol>
+
+\ref LBFileCreate
+<ol>
+<li>What two HDF5 routines must be called to create an HDF5 file?
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>#H5Fcreate and #H5Fclose.
+</td>
+</tr>
+</table>
+</li>
+<li>What include file must be included in any file that uses the HDF5 library?
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>hdf5.h must be included because it contains definitions and declarations used by the library.
+</td>
+</tr>
+</table>
+</li>
+<li>An HDF5 file is never completely empty because as soon as it is created, it automatically contains a certain primary object. What is that object?
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>The root group.
+</td>
+</tr>
+</table>
+</li>
+</ol>
+
+\ref LBDsetCreate
+<ol>
+<li>Name and describe two major datatype categories.
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>Atomic datatype: An atomic datatype cannot be decomposed into smaller units at the API level.
+<br />
+Compound datatype: A compound datatype is a collection of atomic and compound datatypes, or small arrays of such types.
+</td>
+</tr>
+</table>
+</li>
+<li>List the HDF5 atomic datatypes. Give an example of a predefined datatype. How would you create a string dataset?
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>There are six HDF5 atomic datatypes: integer, floating point, date and time, character string, bit field, and opaque.
+<br />
+Examples of predefined datatypes include the following:<br />
+\li #H5T_IEEE_F32LE - 4-byte little-endian, IEEE floating point
+\li #H5T_NATIVE_INT - native integer
+
+You would create a string dataset with the #H5T_C_S1 datatype, and set the size of the string with the #H5Tset_size call.
+</td>
+</tr>
+</table>
+</li>
+<li>What does the dataspace describe? What are the major characteristics of the simple dataspace?
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>The dataspace describes the dimensionality of the dataset. A simple dataspace is characterized by its rank and dimension sizes.
+</td>
+</tr>
+</table>
+</li>
+<li>What information needs to be passed to the #H5Dcreate function, i.e., what information is needed to describe a dataset at creation time?
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>The dataset location, name, dataspace, datatype, and dataset creation property list.
+</td>
+</tr>
+</table>
+</li>
+</ol>
+
+
+\ref LBDsetRW
+<ol>
+<li>What are six pieces of information which need to be specified for reading and writing a dataset?
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>The dataset identifier, the dataset's datatype and dataspace in memory, the dataspace in the file,
+the dataset transfer property list, and a data buffer.
+</td>
+</tr>
+</table>
+</li>
+<li>Why are both the memory dataspace and file dataspace needed for read/write operations, while only the memory datatype is required?
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>A dataset's file datatype is not required for a read/write operation because the file datatype is specified
+when the dataset is created and cannot be changed. Both file and memory dataspaces are required for dataset
+subsetting and for performing partial I/O operations.
+</td>
+</tr>
+</table>
+</li>
+<li>In Figure 6.1, what does this line mean?
+\code
+DATASPACE { SIMPLE (4 , 6 ) / ( 4 , 6 ) }
+\endcode
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>It means that the dataset dset has a simple dataspace with the current dimensions (4,6) and the maximum size of the dimensions (4,6).
+</td>
+</tr>
+</table>
+</li>
+</ol>
+
+
+\ref LBAttrCreate
+<ol>
+<li>What is an attribute?
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>An attribute is a dataset attached to an object. It describes the nature and/or the intended usage of the object.
+</td>
+</tr>
+</table>
+</li>
+<li>Can partial I/O operations be performed on attributes?
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>No.
+</td>
+</tr>
+</table>
+</li>
+</ol>
+
+
+\ref LBGrpCreate
+<ol>
+<li>What are the two primary objects that can be included in a group?
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>A group and a dataset.
+</td>
+</tr>
+</table>
+</li>
+</ol>
+
+
+\ref LBGrpCreateNames
+<ol>
+<li>Group names can be specified in two ways. What are these two types of group names?
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>Relative and absolute.
+</td>
+</tr>
+</table>
+</li>
+<li>You have a dataset named <code style="background-color:whitesmoke;">moo</code> in the group <code style="background-color:whitesmoke;">boo</code>, which is in the group <code style="background-color:whitesmoke;">foo</code>, which, in turn,
+is in the <code style="background-color:whitesmoke;">root</code> group. How would you specify an absolute name to access this dataset?
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>/foo/boo/moo
+</td>
+</tr>
+</table>
+</li>
+</ol>
+
+
+\ref LBGrpDset
+<ol>
+<li>Describe a way to access the dataset moo described in the previous section
+(question 2) using a relative name. Describe a way to access the same dataset using an absolute name.
+<table>
+<tr>
+<th><strong>Answer</strong>
+</th>
+<td>Access the group /foo and get the group ID. Access the group boo using the group ID obtained in Step 1.
+Access the dataset moo using the group ID obtained in Step 2.
+\code
+gid = H5Gopen (file_id, "/foo", H5P_DEFAULT); /* absolute path */
+gid1 = H5Gopen (gid, "boo", H5P_DEFAULT); /* relative path */
+did = H5Dopen (gid1, "moo", H5P_DEFAULT); /* relative path */
+\endcode
+Access the group /foo and get the group ID. Access the dataset boo/moo with the group ID just obtained.
+\code
+gid = H5Gopen (file_id, "/foo", H5P_DEFAULT); /* absolute path */
+did = H5Dopen (gid, "boo/moo", H5P_DEFAULT); /* relative path */
+\endcode
+Access the dataset with an absolute path.
+\code
+did = H5Dopen (file_id, "/foo/boo/moo", H5P_DEFAULT); /* absolute path */
+\endcode
+</td>
+</tr>
+</table>
+</li>
+</ol>
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBCompiling Compiling HDF5 Applications
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+\section secLBCompiling Tools and Instructions on Compiling
+Compiling applications to use the HDF5 Library can be as simple as executing:
+\code
+h5cc -o myprog myprog.c
+\endcode
+
+As an application's file base evolves, there are better solutions using autotools and makefiles or
+CMake and CMakeLists.txt files. Many tutorials and references can be found with a simple search.
+
+This tutorial section will discuss the use of compile scripts on Linux.
+See the \ref secLBCompilingVS section for compiling with Visual Studio.
+
+\section secLBCompilingLinux Compile Scripts
+When the library is built, the following compile scripts are included:
+\li h5cc: compile script for HDF5 C programs
+\li h5fc: compile script for HDF5 F90 programs
+\li h5c++: compile script for HDF5 C++ programs
+
+These scripts are easily used to compile single file applications, such as those included in the tutorial.
+<table>
+<tr>
+<th><strong>Warning</strong>
+</th>
+<td>The h5cc/h5fc/h5c++ compile scripts are included when building with configure. Versions of
+these compile scripts have also been added to CMake for Linux ONLY. The CMake versions rely on pkgconfig files.
+</td>
+</tr>
+</table>
+
+<h4>Examples of Using the Unix Compile Scripts:</h4>
+Following are examples of compiling and running an application with the Unix compile scripts:
+\code
+ h5fc myprog.f90
+ ./a.out
+
+ h5cc -o myprog myprog.c
+ ./myprog
+\endcode
+
+To see how the libraries linked in with a compile script were configured and built, use the
+-showconfig option. For example, if using h5cc type:
+\code
+ h5cc -showconfig
+\endcode
+
+<h4>Detailed Description of Unix Compile Scripts:</h4>
+The h5cc, h5c++, and h5fc compile scripts come with the HDF5 binary distributions (include files,
+libraries, and utilities) for the platforms we support. The h5c++ and h5fc utilities are ONLY present
+if the library was built with C++ and Fortran.
+
+\section secLBCompilingVS Using Visual Studio
+
+ 1. If you are building on 64-bit Windows, find the "Platform" dropdown
+ and select "x64". Also select the correct Configuration (Debug, Release, RelWithDebInfo, etc)
+
+ 2. Set up path for external headers
+
+ The HDF5 install path settings will need to be in the project property sheets per project.
+ Go to "Project" and select "Properties", find "Configuration Properties",
+ and then "C/C++".
+
+ 2.1 Add the header path to the "Additional Include Directories" setting. Under "C/C++"
+ find "General" and select "Additional Include Directories". Select "Edit" from the dropdown
+ and add the HDF5 install/include path to the list.
+ (Ex: "C:\Program Files\HDF_Group\HDF5\1.10.9\include")
+
+ 2.2 Building applications with the dynamic/shared hdf5 libraries requires
+ that the "H5_BUILT_AS_DYNAMIC_LIB" compile definition be used. Under "C/C++"
+ find "Preprocessor" and select "Preprocessor Definitions". Select "Edit" from the dropdown
+ and add "H5_BUILT_AS_DYNAMIC_LIB" to the list.
+
+ 3. Set up path for external libraries
+
+ The HDF5 install path/lib settings will need to be in the project property sheets per project.
+ Go to "Project" and select "Properties", find "Configuration Properties",
+ and then "Linker".
+
+ 3.1 Add the libraries to the "Additional Dependencies" setting. Under "Linker"
+ find "Input" and select "Additional Dependencies". Select "Edit" from the dropdown
+ and add the required HDF5 install/lib path to the list.
+ (Ex: "C:\Program Files\HDF_Group\HDF5\1.10.9\lib\hdf5.lib")
+
+ 3.2 For static builds, the external libraries should be added.
+ For example, to compile a C++ application, enter:
+ libhdf5_cpp.lib libhdf5.lib libz.lib libszaec.lib libaec.lib
+
+\section secLBCompilingLibs HDF5 Libraries
+Following are the libraries included with HDF5. Whether you are using the Unix compile scripts or
+Makefiles, or are compiling on Windows, these libraries are or may need to be specified. The order
+they are specified is important on Linux:
+
+<table>
+<caption>HDF5 Static Libraries</caption>
+<tr>
+<th>Library</th>
+<th>Linux Name</th>
+<th>Mac Name</th>
+<th>Windows Name</th>
+</tr>
+<tr>
+<td>
+\code
+HDF5 High Level C++ APIs
+HDF5 C++ Library
+HDF5 High Level Fortran APIs
+HDF5 Fortran Library
+HDF5 High Level C APIs
+HDF5 C Library
+\endcode
+</td>
+<td>
+\code
+libhdf5_hl_cpp.a
+libhdf5_cpp.a
+libhdf5hl_fortran.a
+libhdf5_fortran.a
+libhdf5_hl.a
+libhdf5.a
+\endcode
+</td>
+<td>
+\code
+libhdf5_hl_cpp.a
+libhdf5_cpp.a
+libhdf5hl_fortran.a
+libhdf5_fortran.a
+libhdf5_hl.a
+libhdf5.a
+\endcode
+</td>
+<td>
+<em>Windows</em>
+\code
+libhdf5_hl_cpp.lib
+libhdf5_cpp.lib
+libhdf5hl_fortran.lib
+libhdf5_fortran.lib
+libhdf5_hl.lib
+libhdf5.lib
+\endcode
+</td>
+</tr>
+</table>
+
+<table>
+<caption>HDF5 Shared Libraries</caption>
+<tr>
+<th>Library</th>
+<th>Linux Name</th>
+<th>Mac Name</th>
+<th>Windows Name</th>
+</tr>
+<tr>
+<td>
+\code
+HDF5 High Level C++ APIs
+HDF5 C++ Library
+HDF5 High Level Fortran APIs
+HDF5 Fortran Library
+HDF5 High Level C APIs
+HDF5 C Library
+\endcode
+</td>
+<td>
+\code
+libhdf5_hl_cpp.so
+libhdf5_cpp.so
+libhdf5hl_fortran.so
+libhdf5_fortran.so
+libhdf5_hl.so
+libhdf5.so
+\endcode
+</td>
+<td>
+\code
+libhdf5_hl_cpp.dylib
+libhdf5_cpp.dylib
+libhdf5hl_fortran.dylib
+libhdf5_fortran.dylib
+libhdf5_hl.dylib
+libhdf5.dylib
+\endcode
+</td>
+<td>
+\code
+hdf5_hl_cpp.lib
+hdf5_cpp.lib
+hdf5hl_fortran.lib
+hdf5_fortran.lib
+hdf5_hl.lib
+hdf5.lib
+\endcode
+</td>
+</tr>
+</table>
+
+<table>
+<caption>External Libraries</caption>
+<tr>
+<th>Library</th>
+<th>Linux Name</th>
+<th>Mac Name</th>
+<th>Windows Name</th>
+</tr>
+<tr>
+<td>
+\code
+SZIP Compression Library
+SZIP Compression Library
+ZLIB or DEFLATE Compression Library
+\endcode
+</td>
+<td>
+\code
+libszaec.a
+libaec.a
+libz.a
+\endcode
+</td>
+<td>
+\code
+libszaec.a
+libaec.a
+libz.a
+\endcode
+</td>
+<td>
+\code
+libszaec.lib
+libaec.lib
+libz.lib
+\endcode
+</td>
+</tr>
+</table>
+
+The pre-compiled binaries, in particular, are built (if at all possible) with these libraries as well as with
+SZIP and ZLIB. If using shared libraries you may need to add the path to the library to LD_LIBRARY_PATH on Linux
+or on Windows you may need to add the path to the bin folder to PATH.
+
+\section secLBCompilingCMake Compiling an Application with CMake
+
+\subsection subsecLBCompilingCMakeScripts CMake Scripts for Building Applications
+Simple scripts are provided for building applications with different languages and options.
+See <a href="https://confluence.hdfgroup.org/display/support/CMake+Scripts+for+Building+Applications">CMake Scripts for Building Applications</a>.
+
+For a more complete script (and to help resolve issues) see the script provided with the HDF5 Examples project.
+
+\subsection subsecLBCompilingCMakeExamples HDF5 Examples
+The installed HDF5 can be verified by compiling the HDF5 Examples project, included with the CMake built HDF5 binaries
+in the share folder or you can go to the <a href="https://github.com/HDFGroup/hdf5_examples">HDF5 Examples</a> github repository.
+
+Go into the share directory and follow the instructions in USING_CMake_examples.txt to build the examples.
+
+In general, users must first set the HDF5_ROOT environment variable to the installed location of the CMake
+configuration files for HDF5. For example, on Windows the following path might be set:
+
+\code
+ HDF5_ROOT=C:/Program Files/HDF_Group/HDF5/1.N.N
+\endcode
+
+\subsection subsecLBCompilingCMakeTroubless Troubleshooting CMake
+<h4>How do you use find_package with HDF5?</h4>
+To use find_package you will first need to make sure that HDF5_ROOT is set correctly. For setting this
+environment variable see the Preconditions in the USING_HDF5_CMake.txt file in the share directory.
+
+See the CMakeLists.txt file provided with these examples for how to use find_package with HDF5.
+
+Please note that the find_package invocation changed to require "shared" or "static":
+\code
+ FIND_PACKAGE(HDF5 COMPONENTS C HL NO_MODULE REQUIRED shared)
+ FIND_PACKAGE(HDF5 COMPONENTS C HL NO_MODULE REQUIRED static)
+\endcode
+
+Previously, the find_package invocation was:
+\code
+ FIND_PACKAGE(HDF5 COMPONENTS C HL NO_MODULE REQUIRED)
+\endcode
+
+<h4>My platform/compiler is not included. Can I still use the configuration files?</h4>
+Yes, you can but you will have to edit the HDF5_Examples.cmake file and update the variable:
+\code
+ CTEST_CMAKE_GENERATOR
+\endcode
+
+The generators for your platform can be seen by typing:
+\code
+ cmake --help
+\endcode
+
+<h4>What do I do if the build fails?</h4>
+I received an error during the build and the application binary is not in the
+build directory as I expected. How do I determine what the problem is?
+
+If the error is not clear, then the first thing you may want to do is replace the -V (Dash Uppercase Vee)
+option for ctest in the build script to -VV (Dash Uppercase Vee Uppercase Vee). Then remove the build
+directory and re-run the build script. The output should be more verbose.
+
+If the error is still not clear, then check the log files. You will find those in the build directory.
+For example, on Unix the log files will be in:
+\code
+ build/Testing/Temporary/
+\endcode
+There are log files for the configure, test, and build.
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+@page LBTraining Training Videos
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+<hr>
+
+<a href="https://confluence.hdfgroup.org/display/HDF5/Training+Videos">Training Videos</a>
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
+
+*/
diff --git a/doxygen/dox/LearnHDFView.dox b/doxygen/dox/LearnHDFView.dox
new file mode 100644
index 0000000..b1f632c
--- /dev/null
+++ b/doxygen/dox/LearnHDFView.dox
@@ -0,0 +1,472 @@
+/** @page LearnHDFView Learning HDF5 with HDFView
+
+Navigate back: \ref index "Main" / \ref GettingStarted
+<hr>
+
+This tutorial enables you to get a feel for HDF5 by using the HDFView browser. It does NOT require
+any programming experience.
+
+\section sec_learn_hv_install HDFView Installation
+\li Download and install HDFView. It can be downloaded from the <a href="https://portal.hdfgroup.org/display/support/Download+HDFView">Download HDFView</a> page.
+\li Obtain the <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/storm1.txt">storm1.txt</a> text file, used in the tutorial.
+
+\section sec_learn_hv_begin Begin Tutorial
+Once you have HDFView installed, bring it up and you are ready to begin the tutorial.
+
+<table style="background-color:#FAFAD2">
+<caption>
+Unable to complete tutorial because fields are greyed out?
+</caption>
+<tr>
+<td>
+This tutorial requires that the default HDFView File Access Mode be Read / Write. If fields are greyed out so that you cannot select them, then the File Access Mode is Read Only.
+
+To change the File Access Mode follow these steps:
+<ul>
+<li>Bring up HDFView</li>
+<li>Left-mouse click on the Tools pull-down menu and select User Options.</li>
+<li>A Preferences window pops up with the General Settings tab selected.
+About half-way down you will see Default File Access Mode.
+Select Read / Write.</li>
+<li>Click on Apply and Close at the bottom of the window.</li>
+<li>Close down HDFView.</li>
+<li>Bring HDFView back up and try the tutorial again.</li>
+</ul>
+PLEASE BE AWARE that selecting a File Access Mode of Read / Write can result in changes to the timestamp of HDF files that are viewed with HDFView. In general, a File Access Mode
+of Read Only should be used to ensure that this does not occur.
+</td>
+</tr>
+</table>
+
+\subsection subsec_learn_hv_begin_topics Topics Covered
+Following are the topics covered in the tutorial. The first topic creates the file that is used in
+the subsequent topics.
+<ul>
+<li>@ref subsec_learn_hv_topics_file</li>
+<li>@ref subsec_learn_hv_topics_image</li>
+<li>@ref subsec_learn_hv_topics_attr</li>
+<li>@ref subsec_learn_hv_topics_compress</li>
+<li>@ref subsec_learn_hv_topics_subset</li>
+<li>@ref subsec_learn_hv_topics_table</li>
+</ul>
+
+\section sec_learn_hv_topics Topics
+
+\subsection subsec_learn_hv_topics_file Creating a New HDF5 File with a Contiguous Dataset
+The steps below describe how to create a file (storm.h5), group (/Data), and a contiguous dataset
+(/Data/Storm) using HDFView. A group is an HDF5 object that allows objects to be collected together.
+A dataset is an array of data values. A contiguous dataset is one that is stored as a single block
+in the HDF5 file.
+<ul>
+<li>Select the <em>File</em> pull-down menu at the top left, and then select <em>New -> HDF5</em>.</li>
+<li>Specify a location and type in <em>storm.h5</em> for the name of your file, and click on the <em>Save</em> button.
+You will see the <em>storm.h5</em> file in the TableView:
+<table>
+<tr>
+<td>
+\image html storm.png
+</td>
+</tr>
+</table>
+</li>
+<li>Right click on <em>storm.h5</em>, and select <em>New -> Group</em>.</li>
+<li>Enter <em>Data</em> for the name of the group and then click the <em>Ok</em> button. You will see the group <em>Data</em> in the TableView.
+<table>
+<tr>
+<td>
+\image html DataGroup.png
+</td>
+</tr>
+</table>
+</li>
+<li>Right click on the group <em>Data</em> and select <em>New -> Dataset</em>.</li>
+<li>A window pops up on the right. Fill in the information as follows, and then click <em>Ok</em> (leave the
+Datatype information as is):
+<table>
+<tr>
+<th>Dataset Name
+</th>
+<td><em>Storm</em>
+</td>
+</tr>
+<tr>
+<th>Under Dataspace, Current size
+</th>
+<td>57x57
+</td>
+</tr>
+<tr>
+<th>Layout
+</th>
+<td><em>Contiguous</em> (default)
+</td>
+</tr>
+</table>
+</li>
+<li>Click to expand the <em>Data</em> group in the tree view to see the <em>Storm</em> dataset:
+<table>
+<tr>
+<td>
+\image html StormDataset.png
+</td>
+</tr>
+</table>
+</li>
+<li>Double left click on the <em>Storm</em> dataset in the tree view. A window with an empty spreadsheet pops open.</li>
+<li>Copy the data from the <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/storm1.txt">storm1.txt</a> file into the dataset.
+
+If you downloaded <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/storm1.txt">storm1.txt</a>,
+then click on the <em>Import/Export Data</em> menu and select <em>Import Data from -> Text File</em>.
+Specify a location, select <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/storm1.txt">storm1.txt</a>
+and click on the <em>Open</em> button. Answer <em>Yes</em> in the dialog box that
+pops up (which asks if you wish to paste the selected data).
+
+Alternately, you can copy/paste directly. Select and copy the data in a separate window. Position your
+cursor at (0,0) in your table, and select <em>Paste</em> from the <em>Table</em> menu.
+
+The values will be entered into the spreadsheet.
+<table>
+<tr>
+<td>
+\image html datasetwdata.png
+</td>
+</tr>
+</table>
+</li>
+<li><em>Table -> Close</em> the dataset, and save the data.</li>
+</ul>
+
+\subsection subsec_learn_hv_topics_image Displaying a Dataset as an Image
+Any dataset can be viewed as an image in HDFView. Below are the steps that demonstrate this.
+<ul>
+<li>Right click on <em>Storm</em> in the tree view, and select <em>Open As</em>.</li>
+<li>Select the <em>Image</em> button under <em>Display As</em> (near the top) in the Dataset Selection window that pops
+up. Then click <em>OK</em> at the bottom of the window to display the image.
+<table>
+<tr>
+<td>
+\image html showasimage.png
+</td>
+</tr>
+</table>
+</li>
+<li>The rainbow icon brings you to the Image Palette window. Click on that to play with the palette
+(GrayWave probably is the best choice). Close.</li>
+</ul>
+
+\subsection subsec_learn_hv_topics_attr Creating Attributes
+Additional information to describe an object can be stored in attributes. An attribute can be
+added to a group or dataset with HDFView.
+
+The following illustrates how to add an attribute to the group <em>/Data</em>:
+<ul>
+<li>Click on the <em>/Data</em> folder in the tree view. You will see two tabs, <em>Object Attribute Info</em> and
+<em>General Object Info</em>, in the pane on the right side of the HDFView window.
+<table>
+<tr>
+<td>
+\image html noattrs.png
+</td>
+</tr>
+</table>
+</li>
+<li>With the left mouse button, select the <em>Add Attribute</em> button to add an attribute with these values:</li>
+<table>
+<tr>
+<th>Name
+</th>
+<td><em>BatchID</em>
+</td>
+</tr>
+<tr>
+<th>Type
+</th>
+<td>INTEGER
+</td>
+</tr>
+<tr>
+<th>Size (bits)
+</th>
+<td>32
+</td>
+</tr>
+</table>
+<li>Select the <em>Ok</em> button. The attribute will show up under the <em>Object Attribute Info</em> tab.</li>
+<li>Double-click the BatchID attribute line to open the data table for BatchID.</li>
+<li>Click in the first cell and enter <em>3343</em> followed by the enter key.</li>
+<li><em>Table -> Close</em>, answer <em>Yes</em> in the dialog box that
+pops up (which asks if you wish to save the changes to this dataset).</li>
+</ul>
+Adding an attribute to a dataset is very similar to adding an attribute to a group. For example,
+the following adds an attribute to the <em>/Storm</em> dataset:
+<ul>
+<li>Left mouse click on the <em>/Storm</em> dataset in the tree view. You will see the <em>Object Attribute
+Info</em> and <em>General Object Info</em> tabs on the right</li>
+<li>In the <em>Object Attribute Info</em> pane select the <em>Add Attribute</em> button and enter an attribute with
+these values. (Be sure to add a <em>String Length</em> or the string will be truncated to one character!):</li>
+<table>
+<tr>
+<th>Name
+</th>
+<td><em>Units</em>
+</td>
+</tr>
+<tr>
+<th>Type
+</th>
+<td>STRING
+</td>
+</tr>
+<tr>
+<th>String Length
+</th>
+<td>3
+</td>
+</tr>
+</table>
+<li>Select the <em>Ok</em> button. The attribute will show up under the <em>Object Attribute Info</em> tab.</li>
+<li>Double-click the Units attribute line to open the data table for Units.</li>
+<li>Click in the first cell and enter <em>m/s</em> followed by the enter key.</li>
+<li><em>Table -> Close</em>, answer <em>Yes</em> in the dialog box that
+pops up (which asks if you wish to save the changes to this dataset).
+<table>
+<tr>
+<td>
+\image html scarletletter.png
+</td>
+</tr>
+</table>
+</li>
+</ul>
+
+\subsection subsec_learn_hv_topics_compress Creating a Compressed and Chunked Dataset
+A chunked and compressed dataset can be created using HDFView. A compressed dataset is a dataset
+whose size has been compressed to take up less space. In order to compress an HDF5 dataset, the
+dataset must be stored with a chunked dataset layout (as multiple <em>chunks</em> that are stored separately
+in the file).
+
+Please note that the chunk sizes used in this topic are for demonstration purposes only. For
+information on chunking and specifying an appropriate chunk size, see the
+<a href="https://confluence.hdfgroup.org/display/HDF5/Chunking+in+HDF5">Chunking in HDF5</a> documentation.
+
+Also see the HDF5 Tutorial topic on \ref secLBComDsetCreate.
+<ul>
+<li>Right click on storm.h5. Select <em>New -> Group</em>.</li>
+<li>Enter <em>Image</em> for the name of the group, and click the <em>OK</em> button to create the group.
+<table>
+<tr>
+<td>
+\image html newgroupimage.png
+</td>
+</tr>
+</table>
+</li>
+<li>Right click on the <em>Image</em> group, and select <em>New -> Dataset</em>.</li>
+<li>Enter the following information for the dataset. Leave the <em>Datatype</em> as is (INTEGER):
+<table>
+<tr>
+<th>Dataset name
+</th>
+<td><em>Another Storm</em>
+</td>
+</tr>
+<tr>
+<th>Under Dataspace, Current size
+</th>
+<td>57x57
+</td>
+</tr>
+<tr>
+<th>Storage Layout
+</th>
+<td>Chunked
+</td>
+</tr>
+<tr>
+<th>Chunk Size
+</th>
+<td>20x20
+</td>
+</tr>
+<tr>
+<th>Compression
+</th>
+<td>gzip
+</td>
+</tr>
+<tr>
+<th>Compression Level
+</th>
+<td>9
+</td>
+</tr>
+</table>
+You will see the <em>Another Storm</em> dataset in the <em>Image</em> group:
+<table>
+<tr>
+<td>
+\image html hdfview-anthrstrm.png
+</td>
+</tr>
+</table>
+</li>
+<li>Double left-mouse click on the <em>Another Storm</em> dataset to display the spreadsheet:
+<table>
+<tr>
+<td>
+\image html hdfview-anthrstrm-sprdsht.png
+</td>
+</tr>
+</table>
+</li>
+<li>Copy the data from the <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/storm1.txt">storm1.txt</a> file into the dataset. (See the previous topic for copying
+<a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/storm1.txt">storm1.txt</a> into a dataset.)</li>
+<li><em>Table -> Close</em>, and save the data.</li>
+<li>Right click on <em>Another Storm</em>, and select <em>Open As</em>.</li>
+<li>Select the <em>Image</em> button in the Dataset Selection window that pops up. Click the <em>Ok</em> button at the
+bottom of the window to view the dataset as an image.
+<table>
+<tr>
+<td>
+\image html hdfview-anthrstrm-img.png
+</td>
+</tr>
+</table>
+</li>
+</ul>
+
+\subsection subsec_learn_hv_topics_subset Creating an Image and a Subset
+A previous topic demonstrated how to view any dataset as an image in HDFView. With HDFView you can also
+create an image to begin with, as is shown below.
+<ul>
+<li>Right click on the <em>Data</em> group and select <em>New -> Image</em>.</li>
+<li>A window pops up on the right. Enter the following and then click <em>Ok</em>:</li>
+<table>
+<tr>
+<th>Image name
+</th>
+<td><em>Storm Image</em>
+</td>
+</tr>
+<tr>
+<th>Height
+</th>
+<td>57
+</td>
+</tr>
+<tr>
+<th>Width
+</th>
+<td>57
+</td>
+</tr>
+</table>
+
+<li>Close the dataset.</li>
+<li>Expand the <em>Data</em> group to see its contents. You will see the <em>Storm Image</em> dataset.
+<table>
+<tr>
+<td>
+\image html hdfview-imgicon.png
+</td>
+</tr>
+</table>
+</li>
+<li>
+Add data to the <em>Storm Image</em> dataset as was shown previously:
+<ul>
+<li>Right click on <em>Storm Image</em>, and select <em>Open As</em> to open the Dataset Selection window.</li>
+<li>Click on the <em>Spreadsheet</em> button at the top left of the Dataset Selection window to view the image
+as a spreadsheet.</li>
+<li>Copy the data from the <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/storm1.txt">storm1.txt</a> file into the dataset.</li>
+<li>Close the dataset and save the data.</li>
+</ul>
+</li>
+<li>Left double click on <em>Storm Image</em> to see the image. Close the dataset.</li>
+<li>Right click on <em>Storm Image</em> and select <em>Open As</em> to bring up the Data Selection window.</li>
+<li>Select a subset by clicking the left mouse on the image in the window and dragging the mouse.
+Notice that the Height and Width values change. Select to display it as an image. Click <em>Ok</em>.
+<table>
+<tr>
+<td>
+\image html hdfview-imgsubset.png
+</td>
+</tr>
+</table>
+</li>
+<li>Position the cursor in the middle of the image. Press Shift+Left Mouse button and hold, and then
+drag the mouse to select another subset.</li>
+<li>Select <em>Image->Write Selection to Image</em>. Enter <em>Subset</em> for the new image name. Click <em>Ok</em>. The <em>Subset</em>
+image will appear in the tree view on the left.</li>
+<li>Left double click on the image <em>Subset</em> to bring it up on the right.
+<table>
+<tr>
+<td>
+\image html hdfview-newimgsubset.png
+</td>
+</tr>
+</table>
+</li>
+<li>Close the <em>Subset</em> image.</li>
+</ul>
+
+\subsection subsec_learn_hv_topics_table Creating a Table (Compound Dataset)
+A dataset with a compound datatype contains data elements that consist of multiple fields. If the
+dataspace for the compound dataset is one-dimensional, then the dataset can be viewed as a table in
+HDFView, as is shown below.
+<ul>
+<li>Right button click on the group <em>Data</em>. Select <em>New -> Compound DS</em>.</li>
+<li>A window pops up. Only fill in the following fields:
+<table>
+<tr>
+<th>Dataset name
+</th>
+<td>Table
+</td>
+</tr>
+<tr>
+<th>Dataspace (Current size only)
+</th>
+<td>4
+</td>
+</tr>
+<tr>
+<th>Compound Datatype Properties:
+<br />Number of Members
+</th>
+<td>3
+</td>
+</tr>
+<tr>
+<th>Compound Datatype Properties:
+<br /><em>Name</em> / Datatype / Size
+</th>
+<td><em>Description</em> / string / 4
+<br /><em>Temperature</em> / float / 1
+<br /><em>Pressure</em> / double / 1
+</td>
+</tr>
+</table>
+
+<table>
+<tr>
+<td>
+\image html hdfview-newcmpd.png
+</td>
+</tr>
+</table>
+</li>
+<li>Click Ok at the bottom.</li>
+<li>Open the Data group (if it is not open) and double left click on the Table object.
+<table>
+<tr>
+<td>
+\image html hdfview-table.png
+</td>
+</tr>
+</table>
+</li>
+<li>Close the dataset.</li>
+</ul>
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted
+
+*/
diff --git a/doxygen/dox/ReferenceManual.dox b/doxygen/dox/ReferenceManual.dox
index df0c747..7900925 100644
--- a/doxygen/dox/ReferenceManual.dox
+++ b/doxygen/dox/ReferenceManual.dox
@@ -1,53 +1,32 @@
/** \page RM HDF5 Reference Manual
-The functions provided by the HDF5 C-API are grouped into the following
+The functions provided by the HDF5 API are grouped into the following
\Emph{modules}:
<table>
<tr><th>Modules</th></tr>
<tr valign="top">
<td>
-
<table>
<tr valign="top"><td style="border: none;">
-\li \ref H5A "Attributes (H5A)"
-\li \ref H5D "Datasets (H5D)"
-\li \ref H5S "Dataspaces (H5S)"
-\li \ref H5T "Datatypes (H5T)"
-\li \ref H5E "Error Handling (H5E)"
-\li \ref H5ES "Event Sets (H5ES)"
-\li \ref H5F "Files (H5F)"
-\li \ref H5Z "Filters (H5Z)"
-\li \ref H5G "Groups (H5G)"
-</td><td style="border: none;">
-\li \ref H5I "Identifiers (H5I)"
-\li \ref H5 "Library General (H5)"
-\li \ref H5L "Links (H5L)"
-\li \ref H5M "Maps (H5M)"
-\li \ref H5O "Objects (H5O)"
-\li \ref H5P "Property Lists (H5P)"
-\li \ref H5PL "Dynamically-loaded Plugins (H5PL)"
-\li \ref H5R "References (H5R)"
-\li \ref H5VL "Virtual Object Layer (H5VL)"
-</td><td style="border: none;">
-\li \ref high_level
- <ul>
- <li>\ref H5LT "Lite (H5LT, H5LD)"
- <li>\ref H5IM "Images (H5IM)"
- <li>\ref H5TB "Table (H5TB)"
- <li>\ref H5PT "Packet Table (H5PT)"
- <li>\ref H5DS "Dimension Scale (H5DS)"
- <li>\ref H5DO "Optimizations (H5DO)"
- <li>\ref H5LR "Extensions (H5LR, H5LT)"
- </ul>
-</td></tr>
-<tr><td colspan="3" style="border: none;">
-\a Core \a library: \ref H5 \ref H5A \ref H5D \ref H5E \ref H5ES \ref H5F \ref H5G \ref H5I \ref H5L
-\ref H5M \ref H5O \ref H5P \ref H5PL \ref H5R \ref H5S \ref H5T \ref H5VL \ref H5Z
-</td></tr>
-<tr><td colspan="3" style="border: none;">
-\a High-level \a library: \ref H5LT \ref H5IM \ref H5TB \ref H5PT \ref H5DS \ref H5DO \ref H5LR
-</td></tr>
+\include{doc} core_menu.md
+</td>
+</tr>
+<tr valign="top"><td style="border: none;">
+<!-- High-level library -->
+\include{doc} high_level_menu.md
+</td>
+</tr>
+<tr valign="top"><td style="border: none;">
+<!-- Fortran library -->
+\include{doc} fortran_menu.md
+</td>
+</tr>
+<tr valign="top"><td style="border: none;">
+<!-- Java library -->
+\include{doc} java_menu.md
+</td>
+</tr>
<tr>
<td><a href="./deprecated.html">Deprecated functions</a></td>
<td>Functions with \ref ASYNC</td>
diff --git a/doxygen/dox/Specifications.dox b/doxygen/dox/Specifications.dox
index 5a36d61..e352f40 100644
--- a/doxygen/dox/Specifications.dox
+++ b/doxygen/dox/Specifications.dox
@@ -2,20 +2,20 @@
\section DDL
-\li \ref DDLBNF110 "DDL in BNF through HDF5 1.10"
-\li \ref DDLBNF112 "DDL in BNF for HDF5 1.12 and above"
+\li \ref DDLBNF110
+\li \ref DDLBNF112
\section File Format
-\li \ref FMT1 "HDF5 File Format Specification Version 1.0"
-\li \ref FMT11 "HDF5 File Format Specification Version 1.1"
-\li \ref FMT2 "HDF5 File Format Specification Version 2.0"
-\li \ref FMT3 "HDF5 File Format Specification Version 3.0"
+\li \ref FMT1
+\li \ref FMT11
+\li \ref FMT2
+\li \ref FMT3
\section Other
-\li \ref IMG "HDF5 Image and Palette Specification Version 1.2"
-\li \ref TBL "HDF5 Table Specification Version 1.0"
+\li \ref IMG
+\li \ref TBL
\li <a href="https://support.hdfgroup.org/HDF5/doc/HL/H5DS_Spec.pdf">
HDF5 Dimension Scale Specification</a>
diff --git a/doxygen/dox/TechnicalNotes.dox b/doxygen/dox/TechnicalNotes.dox
index 9bd2802..bca81e4 100644
--- a/doxygen/dox/TechnicalNotes.dox
+++ b/doxygen/dox/TechnicalNotes.dox
@@ -1,13 +1,13 @@
/** \page TN Technical Notes
-\li \link api-compat-macros API Compatibility Macros \endlink
-\li \ref APPDBG "Debugging HDF5 Applications"
-\li \ref FMTDISC "File Format Walkthrough"
-\li \ref FILTER "Filters"
-\li \ref IOFLOW "HDF5 Raw I/O Flow Notes"
-\li \ref TNMDC "Metadata Caching in HDF5"
-\li \ref MT "Thread Safe library"
-\li \ref VFL "Virtual File Layer"
+\li \ref api-compat-macros
+\li \ref APPDBG
+\li \ref FMTDISC
+\li \ref FILTER
+\li \ref IOFLOW
+\li \ref TNMDC
+\li \ref MT
+\li \ref VFL
*/
diff --git a/doxygen/dox/UsersGuide.dox b/doxygen/dox/UsersGuide.dox
new file mode 100644
index 0000000..dbb6053
--- /dev/null
+++ b/doxygen/dox/UsersGuide.dox
@@ -0,0 +1,403 @@
+/** \page UG HDF5 User Guide
+
+<center>
+HDF5 Release 1.14
+
+\image html HDFG-logo.png "The HDF Group"
+
+</center>
+
+\ref sec_data_model
+\li \ref subsec_data_model_intro
+\li \ref subsec_data_model_abstract
+ <ul>
+ <li> \ref subsubsec_data_model_abstract_file
+ <li> \ref subsubsec_data_model_abstract_group
+ <li> \ref subsubsec_data_model_abstract_dataset
+ <li> \ref subsubsec_data_model_abstract_space
+ <li> \ref subsubsec_data_model_abstract_type
+ <li> \ref subsubsec_data_model_abstract_attr
+ <li> \ref subsubsec_data_model_abstract_plist
+ <li> \ref subsubsec_data_model_abstract_link
+ </ul>
+\li \ref subsec_data_model_storage
+ <ul>
+ <li> \ref subsubsec_data_model_storage_spec
+ <li> \ref subsubsec_data_model_storage_imple
+ </ul>
+\li \ref subsec_data_model_structure
+ <ul>
+ <li> \ref subsubsec_data_model_structure_file
+ <li> \ref subsubsec_data_model_structure_path
+ <li> \ref subsubsec_data_model_structure_example
+ </ul>
+
+\ref sec_program
+\li \ref subsec_program_intro
+\li \ref subsec_program_model
+ <ul>
+ <li> \ref subsubsec_program_model_create
+ <li> \ref subsubsec_program_model_dset
+ <li> \ref subsubsec_program_model_close
+ <li> \ref subsubsec_program_model_data
+ <li> \ref subsubsec_program_model_partial
+ <li> \ref subsubsec_program_model_info
+ <li> \ref subsubsec_program_model_compound
+ <li> \ref subsubsec_program_model_extend
+ <li> \ref subsubsec_program_model_group
+ <li> \ref subsubsec_program_model_attr
+ </ul>
+\li \ref subsec_program_transfer_pipeline
+
+\ref sec_file
+\li \ref subsec_file_intro
+\li \ref subsec_file_access_modes
+\li \ref subsec_file_creation_access
+\li \ref subsec_file_drivers
+\li \ref subsec_file_program_model
+ <ul>
+ <li> \ref subsubsec_file_program_model_create
+ <li> \ref subsubsec_file_program_model_open
+ <li> \ref subsubsec_file_program_model_close
+ </ul>
+\li \ref subsec_file_h5dump
+\li \ref subsec_file_summary
+\li \ref subsec_file_create
+\li \ref subsec_file_closes
+\li \ref subsec_file_property_lists
+ <ul>
+ <li> \ref subsubsec_file_property_lists_create
+ <li> \ref subsubsec_file_property_lists_props
+ <li> \ref subsubsec_file_property_lists_access
+ </ul>
+\li \ref subsec_file_alternate_drivers
+ <ul>
+ <li> \ref subsubsec_file_alternate_drivers_id
+ <li> \ref subsubsec_file_alternate_drivers_sec2
+ <li> \ref subsubsec_file_alternate_drivers_direct
+ <li> \ref subsubsec_file_alternate_drivers_log
+ <li> \ref subsubsec_file_alternate_drivers_win
+ <li> \ref subsubsec_file_alternate_drivers_stdio
+ <li> \ref subsubsec_file_alternate_drivers_mem
+ <li> \ref subsubsec_file_alternate_drivers_family
+ <li> \ref subsubsec_file_alternate_drivers_multi
+ <li> \ref subsubsec_file_alternate_drivers_split
+ <li> \ref subsubsec_file_alternate_drivers_par
+ </ul>
+\li \ref subsec_file_examples
+ <ul>
+ <li> \ref subsubsec_file_examples_trunc
+ <li> \ref subsubsec_file_examples_props
+ <li> \ref subsubsec_file_examples_access
+ </ul>
+\li \ref subsec_file_multiple
+
+\ref sec_group
+\li \ref subsec_group_intro
+\li \ref subsec_group_descr
+ <ul>
+ <li> \ref subsubsec_group_descr_object
+ <li> \ref subsubsec_group_descr_model
+ <li> \ref subsubsec_group_descr_path
+ <li> \ref subsubsec_group_descr_impl
+ </ul>
+\li \ref subsec_group_h5dump
+\li \ref subsec_group_function
+\li \ref subsec_group_program
+ <ul>
+ <li> \ref subsubsec_group_program_create
+ <li> \ref subsubsec_group_program_open
+ <li> \ref subsubsec_group_program_dataset
+ <li> \ref subsubsec_group_program_close
+ <li> \ref subsubsec_group_program_links
+ <li> \ref subsubsec_group_program_info
+ <li> \ref subsubsec_group_program_objs
+ <li> \ref subsubsec_group_program_all
+ </ul>
+\li \ref subsec_group_examples
+
+\ref sec_dataset
+\li \ref subsec_dataset_intro
+\li \ref subsec_dataset_function
+\li \ref subsec_dataset_program
+ <ul>
+ <li> \ref subsubsec_dataset_program_general
+ <li> \ref subsubsec_dataset_program_create
+ <li> \ref subsubsec_dataset_program_transfer
+ <li> \ref subsubsec_dataset_program_read
+ </ul>
+\li \ref subsec_dataset_transfer Data Transfer
+ <ul>
+ <li> \ref subsubsec_dataset_transfer_pipe
+ <li> \ref subsubsec_dataset_transfer_filter
+ <li> \ref subsubsec_dataset_transfer_drive
+ <li> \ref subsubsec_dataset_transfer_props
+ <li> \ref subsubsec_dataset_transfer_store
+ <li> \ref subsubsec_dataset_transfer_partial
+ </ul>
+\li \ref subsec_dataset_allocation
+ <ul>
+ <li> \ref subsubsec_dataset_allocation_store
+ <li> \ref subsubsec_dataset_allocation_delete
+ <li> \ref subsubsec_dataset_allocation_release
+ <li> \ref subsubsec_dataset_allocation_ext
+ </ul>
+\li \ref subsec_dataset_filters
+ <ul>
+ <li> \ref subsubsec_dataset_filters_nbit
+ <li> \ref subsubsec_dataset_filters_scale
+ <li> \ref subsubsec_dataset_filters_szip
+ </ul>
+
+\ref sec_datatype
+\li \ref subsec_datatype_intro
+\li \ref subsec_datatype_model
+ <ul>
+ <li> \ref subsubsec_datatype_model_class
+ <li> \ref subsubsec_datatype_model_predefine
+ </ul>
+\li \ref subsec_datatype_usage
+ <ul>
+ <li> \ref subsubsec_datatype_usage_object
+ <li> \ref subsubsec_datatype_usage_create
+ <li> \ref subsubsec_datatype_usage_transfer
+ <li> \ref subsubsec_datatype_usage_discover
+ <li> \ref subsubsec_datatype_usage_user
+ </ul>
+\li \ref subsec_datatype_function
+\li \ref subsec_datatype_program
+ <ul>
+ <li> \ref subsubsec_datatype_program_discover
+ <li> \ref subsubsec_datatype_program_define
+ </ul>
+\li \ref subsec_datatype_other
+ <ul>
+ <li> \ref subsubsec_datatype_other_strings
+ <li> \ref subsubsec_datatype_other_refs
+ <li> \ref subsubsec_datatype_other_enum
+ <li> \ref subsubsec_datatype_other_opaque
+ <li> \ref subsubsec_datatype_other_bitfield
+ </ul>
+\li \ref subsec_datatype_fill
+\li \ref subsec_datatype_complex
+ <ul>
+ <li> \ref subsubsec_datatype_complex_create
+ <li> \ref subsubsec_datatype_complex_analyze
+ </ul>
+\li \ref subsec_datatype_life
+\li \ref subsec_datatype_transfer
+\li \ref subsec_datatype_text
+
+\ref sec_dataspace
+\li \ref subsec_dataspace_intro
+\li \ref subsec_dataspace_function
+\li \ref subsec_dataspace_program
+ <ul>
+ <li> \ref subsubsec_dataspace_program_object
+ <li> \ref subsubsec_dataspace_program_model
+ </ul>
+\li \ref subsec_dataspace_transfer
+ <ul>
+ <li> \ref subsubsec_dataspace_transfer_select
+ <li> \ref subsubsec_dataspace_transfer_model
+ </ul>
+\li \ref subsec_dataspace_select
+\li \ref subsec_dataspace_refer
+ <ul>
+ <li> \ref subsubsec_dataspace_refer_use
+ <li> \ref subsubsec_dataspace_refer_create
+ <li> \ref subsubsec_dataspace_refer_read
+ </ul>
+\li \ref subsec_dataspace_sample
+
+\ref sec_attribute
+\li \ref subsec_attribute_intro
+\li \ref subsec_attribute_program
+ <ul>
+ <li> <!-- \subsubsection subsubsec_attribute_program_exist --> To Open and Read or Write an Existing Attribute </li>
+ </ul>
+\li \ref subsec_error_H5A
+\li \ref subsec_attribute_work
+ <ul>
+ <li> \ref subsubsec_attribute_work_struct
+ <li> \ref subsubsec_attribute_work_create
+ <li> \ref subsubsec_attribute_work_access
+ <li> \ref subsubsec_attribute_work_info
+ <li> \ref subsubsec_attribute_work_iterate
+ <li> \ref subsubsec_attribute_work_delete
+ <li> \ref subsubsec_attribute_work_close
+ </ul>
+\li \ref subsec_attribute_special
+
+\ref sec_error
+\li \ref subsec_error_intro
+\li \ref subsec_error_program
+\li \ref subsec_error_H5E
+\li \ref subsec_error_ops
+ <ul>
+ <li> \ref subsubsec_error_ops_stack
+ <li> \ref subsubsec_error_ops_print
+ <li> \ref subsubsec_error_ops_mute
+ <li> \ref subsubsec_error_ops_custom_print
+ <li> \ref subsubsec_error_ops_walk
+ <li> \ref subsubsec_error_ops_travers
+ </ul>
+\li \ref subsec_error_adv
+ <ul>
+ <li> \ref subsubsec_error_adv_more
+ <li> \ref subsubsec_error_adv_app
+ </ul>
+
+\ref sec_plist
+\li \ref subsec_plist_intro
+\li \ref subsec_plist_class
+ <ul>
+ <li> \ref subsubsec_plist_class
+ <li> \ref subsubsec_plist_lists
+ <li> \ref subsubsec_plist_props
+ </ul>
+\li \ref subsec_plist_program
+ <ul>
+ <li> \ref subsubsec_plist_default
+ <li> \ref subsubsec_plist_basic
+ <li> \ref subsubsec_plist_additional
+ </ul>
+\li \ref subsec_plist_generic
+\li \ref subsec_plist_H5P
+\li \ref subsec_plist_resources
+\li \ref subsec_plist_notes
+
+\ref sec_vol
+\li \ref subsec_vol_intro
+\li \ref subsec_vol_abstract_layer
+\li \ref subsec_vol_connect
+\li \ref subsec_vol_use
+
+\ref sec_async
+\li \ref subsec_async_intro
+
+\ref sec_map
+
+\ref sec_addition
+
+\page AR_UG Additional Resources
+
+\section sec_addition Additional Resources
+These documents provide additional information for the use and tuning of specific HDF5 features.
+ <table style=" border-spacing:0; padding-left:6.00pt; padding-top:6.00pt; padding-right:6.00pt; padding-bottom:6.00pt; float:aligncenter; width:100%; max-width:432.00pt;" cellspacing="0">
+ <caption x-list-start="1" style="font-size: 12.0pt;">Table of Additional resources</caption>
+ <tr style="height: 23.00pt;">
+ <th style="width: 234.000pt; border-top-style: solid; border-top-width: 1px; border-top-color: #228a22; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align : top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p>Document</p>
+</th>
+ <th style="width: 198.000pt; border-top-style: solid; border-top-width: 1px; border-top-color: #228a22; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align : top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p>Comments</p>
+</th>
+</tr>
+ <tr style="height: 23.00pt;">
+ <td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p style="font-style: italic; color: #0000ff;"><span>@ref HDF5Examples</span></p>
+</td>
+ <td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p>Code examples by API. </p>
+</td>
+</tr>
+ <tr style="height: 36.00pt;">
+ <td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/Chunking/index.html">Chunking in HDF5</a></span></p>
+</td>
+ <td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p>Structuring the use of chunking and tuning it for performance.</p>
+</td>
+</tr>
+ <tr style="height: 36.00pt;">
+ <td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p style="font-style: italic; color: #0000ff;"><span class="FM_LT_LinkText"><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/DirectChunkWrite/UsingDirectChunkWrite.pdf">Using the Direct Chunk Write Function</a></span></p>
+</td>
+ <td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p>Describes another way that chunks can be written to datasets.</p>
+</td>
+</tr>
+ <tr style="height: 88.00pt;">
+ <td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/CommittedDatatypeCopying/CopyingCommittedDatatypesWithH5Ocopy.pdf">Copying Committed Datatypes with H5Ocopy</a></span></p>
+</td>
+ <td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p>Describes how to copy to another file a dataset that uses a committed datatype or an object with an attribute that uses a committed datatype so that the committed datatype in the destination file can be used by multiple objects.</p>
+</td>
+</tr>
+ <tr style="height: 36.00pt;">
+ <td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/MetadataCache/index.html">Metadata Caching in HDF5</a></span></p>
+</td>
+ <td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p>Managing the HDF5 metadata cache and tuning it for performance.</p>
+</td>
+</tr>
+ <tr style="height: 49.00pt;">
+ <td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/DynamicallyLoadedFilters/HDF5DynamicallyLoadedFilters.pdf">HDF5 Dynamically Loaded Filters</a></span></p>
+</td>
+ <td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p>Describes how an HDF5 application can apply a filter that is not registered with the HDF5 Library.</p>
+</td>
+</tr>
+ <tr style="height: 62.00pt;">
+ <td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/FileImageOperations/HDF5FileImageOperations.pdf">HDF5 File Image Operations</a></span></p>
+</td>
+ <td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p>Describes how to work with HDF5 files in memory. Disk I/O is not required when file images are opened, created, read from, or written to.</p>
+</td>
+</tr>
+ <tr style="height: 62.00pt;">
+ <td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/ModifiedRegionWrites/ModifiedRegionWrites.pdf">Modified Region Writes</a></span></p>
+</td>
+ <td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p>Describes how to set write operations for in-memory files so that only modified regions are written to storage. Available when the Core (Memory) VFD is used.</p>
+</td>
+</tr>
+ <tr style="height: 36.00pt;">
+ <td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/UsingIdentifiers/index.html">Using Identifiers</a></span></p>
+</td>
+ <td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p>Describes how identifiers behave and how they should be treated.</p>
+</td>
+</tr>
+ <tr style="height: 36.00pt;">
+ <td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/UsingUnicode/index.html">Using UTF-8 Encoding in HDF5 Applications</a></span></p>
+</td>
+ <td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p>Describes the use of UTF-8 Unicode character encodings in HDF5 applications.</p>
+</td>
+</tr>
+ <tr style="height: 49.00pt;">
+ <td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Advanced/FreeingMemory/FreeingMemoryAllocatedByTheHdf5Library.pdf">Freeing Memory Allocated by the HDF5 Library</a></span></p>
+</td>
+ <td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p>Describes how inconsistent memory management can cause heap corruption or resource leaks and possible solutions.</p>
+</td>
+</tr>
+ <tr style="height: 23.00pt;">
+ <td style="width: 234.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p style="font-style: italic; color: #0000ff;"><span><a href="http://www.hdfgroup.org/HDF5/doc/Glossary.html">HDF5 Glossary</a></span></p>
+</td>
+ <td style="width: 198.000pt; border-bottom-style: solid; border-bottom-width: 1px; border-bottom-color: #228a22; vertical-align: top;padding-left: 6.00pt; padding-top: 3.00pt; padding-right: 6.00pt; padding-bottom: 3.00pt;">
+ <p>A glossary of terms.</p>
+</td>
+</tr>
+ </table>
+
+Previous Chapter \ref sec_plist
+
+\par Don't like what you see? - You can help to improve this User Guide
+ Complete the survey linked near the top of this page!\n
+ We treat documentation like code: Fork the
+ <a href="https://github.com/HDFGroup/hdf5">HDF5 repo</a>, make changes, and create a
+ <a href="https://github.com/HDFGroup/hdf5/pulls">pull request</a> !\n
+
+*/ \ No newline at end of file
diff --git a/doxygen/dox/ViewTools.dox b/doxygen/dox/ViewTools.dox
new file mode 100644
index 0000000..0b685a0
--- /dev/null
+++ b/doxygen/dox/ViewTools.dox
@@ -0,0 +1,1198 @@
+/** @page ViewTools Tools for Viewing and Editing HDF5 Files
+
+Navigate back: \ref index "Main" / \ref GettingStarted
+<hr>
+
+\section secToolsBasic Basic Facts about HDF5
+The following are basic facts about HDF5 files to keep in mind while completing these tutorial topics:
+\li All HDF5 files contain a root group "/".
+\li There are two primary objects in HDF5, a group and a dataset:<br />
+ Groups allow objects to be organized into a group structure, such as a tree.<br />
+ Datasets contain raw data values.
+\li Additional information about an HDF5 object may optionally be stored in attributes attached to the object.
+
+\section secToolsTopics Tutorial Topics
+<table>
+<tr>
+<th>Tutorial Topic</th>
+<th>Description</th>
+</tr>
+<tr>
+<td>
+@ref LearnHDFView
+</td>
+<td>Use HDFView to create, edit and view files.
+</td>
+</tr>
+<tr>
+<td>
+@ref ViewToolsCommand
+</td>
+<td>Use the HDF5 command-line tools for viewing, editing, and comparing HDF5 files.
+</td>
+</tr>
+<tr>
+<td>@ref ViewToolsJPSS
+</td>
+<td>Use HDF5 tools to examine and work with JPSS NPP files.
+</td>
+</tr>
+</table>
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted
+
+@page ViewToolsCommand Command-line Tools
+Navigate back: \ref index "Main" / \ref GettingStarted
+<hr>
+
+\section secViewToolsCommandObtain Obtain Tools and Files (Optional)
+Pre-built binaries for Linux and Windows are distributed within the respective HDF5 binary release
+packages, which can be obtained from the <a href="https://portal.hdfgroup.org/display/support/Download+HDF5">Download HDF5</a> page.
+
+HDF5 files can be obtained from various places such as \ref HDF5Examples and the <a href="http://www.hdfeos.org/">HDF-EOS Tools and
+Information Center</a>. Specifically, the following examples are used in this tutorial topic:
+\li HDF5 Files created from compiling the \ref LBExamples
+\li HDF5 Files on the <a href="https://portal.hdfgroup.org/display/HDF5/Examples+by+API">Examples by API</a> page
+\li NPP JPSS files, <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5.gz">SVM01_npp.. (gzipped)</a>
+and <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5.gz">SVM09_npp.. (gzipped)</a>
+\li HDF-EOS <a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/OMI-Aura.he5">OMI-Aura file</a>
+
+\section secViewToolsCommandTutor Tutorial Topics
+A variety of command-line tools are included in the HDF5 binary distribution. There are tools to view,
+edit, convert and compare HDF5 files. This tutorial discusses the tools by their functionality. It
+does not cover all of the HDF5 tools.
+
+<table>
+<tr>
+<th>Tool Category</th>
+<th>Topic</th>
+<th>Tools Used</th>
+</tr>
+<tr>
+<td><strong>@ref ViewToolsView</strong></td>
+<td>@ref secViewToolsViewContent</td>
+<td>h5dump and h5ls
+</td>
+</tr>
+<tr>
+<td></td>
+<td>@ref secViewToolsViewDset</td>
+<td>h5dump and h5ls
+</td>
+</tr>
+<tr>
+<td></td>
+<td>@ref secViewToolsViewGrps</td>
+<td>h5dump and h5ls
+</td>
+</tr>
+<tr>
+<td></td>
+<td>@ref secViewToolsViewAttr</td>
+<td>h5dump
+</td>
+</tr>
+<tr>
+<td></td>
+<td>@ref secViewToolsViewSub</td>
+<td>h5dump
+</td>
+</tr>
+<tr>
+<td></td>
+<td>@ref secViewToolsViewDtypes</td>
+<td>h5dump
+</td>
+</tr>
+<tr>
+<td>@ref ViewToolsEdit</td>
+<td>@ref secViewToolsEditRemove</td>
+<td>h5repack
+</td>
+</tr>
+<tr>
+<td></td>
+<td>@ref secViewToolsEditChange</td>
+<td>h5repack
+</td>
+</tr>
+<tr>
+<td></td>
+<td>@ref secViewToolsEditApply</td>
+<td>h5repack
+</td>
+</tr>
+<tr>
+<td></td>
+<td>@ref secViewToolsEditCopy</td>
+<td>h5copy
+</td>
+</tr>
+<tr>
+<td></td>
+<td>@ref secViewToolsEditAdd</td>
+<td>h5jam and h5unjam
+</td>
+</tr>
+<tr>
+<td>@ref ViewToolsConvert</td>
+<td>@ref secViewToolsConvertASCII</td>
+<td>h5dump
+</td>
+</tr>
+<tr>
+<td></td>
+<td>@ref secViewToolsConvertBinary</td>
+<td>h5dump
+</td>
+</tr>
+<tr>
+<td></td>
+<td>@ref secViewToolsConvertExport</td>
+<td>h5dump and h5import
+</td>
+</tr>
+</table>
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted
+
+@page ViewToolsView Command-line Tools For Viewing HDF5 Files
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref ViewToolsCommand
+<hr>
+
+\section secViewToolsViewTOC Contents
+<ul>
+<li>\ref secViewToolsViewContent</li>
+<li>\ref secViewToolsViewDset</li>
+<li>\ref secViewToolsViewGrps</li>
+<li>\ref secViewToolsViewAttr</li>
+<li>\ref secViewToolsViewSub</li>
+<li>\ref secViewToolsViewDtypes</li>
+</ul>
+
+\section secViewToolsViewContent File Content and Structure
+The h5dump and h5ls tools can both be used to view the contents of an HDF5 file. The tools are discussed below:
+<ul>
+<li>\ref subsecViewToolsViewContent_h5dump</li>
+<li>\ref subsecViewToolsViewContent_h5ls</li>
+</ul>
+
+\subsection subsecViewToolsViewContent_h5dump h5dump
+The h5dump tool dumps or displays the contents of an HDF5 file (textually). By default if you specify no options,
+h5dump will display the entire contents of a file. There are many h5dump options for examining specific details
+of a file. To see all of the available h5dump options, specify the <code style="background-color:whitesmoke;">-h</code>
+or <code style="background-color:whitesmoke;">--help</code> option:
+\code
+h5dump -h
+\endcode
+
+The following h5dump options can be helpful in viewing the content and structure of a file:
+<table>
+<tr>
+<th>Option</th>
+<th>Description</th>
+<th>Comment</th>
+</tr>
+<tr>
+<td>-n, --contents
+</td>
+<td>Displays a list of the objects in a file
+</td>
+<td>See @ref subsubsecViewToolsViewContent_h5dumpEx1
+</td>
+</tr>
+<tr>
+<td>-n 1, --contents=1
+</td>
+<td>Displays a list of the objects and attributes in a file
+</td>
+<td>See @ref subsubsecViewToolsViewAttr_h5dumpEx6
+</td>
+</tr>
+<tr>
+<td>-H, --header
+</td>
+<td>Displays header information only (no data)
+</td>
+<td>See @ref subsubsecViewToolsViewContent_h5dumpEx2
+</td>
+</tr>
+<tr>
+<td>-A 0, --onlyattr=0
+</td>
+<td>Suppresses the display of attributes
+</td>
+<td>See @ref subsubsecViewToolsViewContent_h5dumpEx2
+</td>
+</tr>
+<tr>
+<td>-N P, --any_path=P
+</td>
+<td>Displays any object or attribute that matches path P
+</td>
+<td>See @ref subsubsecViewToolsViewAttr_h5dumpEx6
+</td>
+</tr>
+</table>
+
+\subsubsection subsubsecViewToolsViewContent_h5dumpEx1 Example 1
+The following command displays a list of the objects in the file OMI-Aura.he5 (an HDF-EOS5 file):
+\code
+h5dump -n OMI-Aura.he5
+\endcode
+
+As shown in the output below, the objects (groups, datasets) are listed to the left, followed by their
+names. You can see that this file contains two top-level groups under the root group, HDFEOS and HDFEOS INFORMATION:
+\code
+HDF5 "OMI-Aura.he5" {
+FILE_CONTENTS {
+ group /
+ group /HDFEOS
+ group /HDFEOS/ADDITIONAL
+ group /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES
+ group /HDFEOS/GRIDS
+ group /HDFEOS/GRIDS/OMI Column Amount O3
+ group /HDFEOS/GRIDS/OMI Column Amount O3/Data Fields
+ dataset /HDFEOS/GRIDS/OMI Column Amount O3/Data Fields/ColumnAmountO3
+ dataset /HDFEOS/GRIDS/OMI Column Amount O3/Data Fields/RadiativeCloudFraction
+ dataset /HDFEOS/GRIDS/OMI Column Amount O3/Data Fields/SolarZenithAngle
+ dataset /HDFEOS/GRIDS/OMI Column Amount O3/Data Fields/ViewingZenithAngle
+ group /HDFEOS INFORMATION
+ dataset /HDFEOS INFORMATION/StructMetadata.0
+ }
+}
+\endcode
+
+\subsubsection subsubsecViewToolsViewContent_h5dumpEx2 Example 2
+The file structure of the OMI-Aura.he5 file can be seen with the following command. The -A 0 option suppresses the display of attributes:
+\code
+h5dump -H -A 0 OMI-Aura.he5
+\endcode
+
+Output of this command is shown below:
+\code
+HDF5 "OMI-Aura.he5" {
+GROUP "/" {
+ GROUP "HDFEOS" {
+ GROUP "ADDITIONAL" {
+ GROUP "FILE_ATTRIBUTES" {
+ }
+ }
+ GROUP "GRIDS" {
+ GROUP "OMI Column Amount O3" {
+ GROUP "Data Fields" {
+ DATASET "ColumnAmountO3" {
+ DATATYPE H5T_IEEE_F32LE
+ DATASPACE SIMPLE { ( 720, 1440 ) / ( 720, 1440 ) }
+ }
+ DATASET "RadiativeCloudFraction" {
+ DATATYPE H5T_IEEE_F32LE
+ DATASPACE SIMPLE { ( 720, 1440 ) / ( 720, 1440 ) }
+ }
+ DATASET "SolarZenithAngle" {
+ DATATYPE H5T_IEEE_F32LE
+ DATASPACE SIMPLE { ( 720, 1440 ) / ( 720, 1440 ) }
+ }
+ DATASET "ViewingZenithAngle" {
+ DATATYPE H5T_IEEE_F32LE
+ DATASPACE SIMPLE { ( 720, 1440 ) / ( 720, 1440 ) }
+ }
+ }
+ }
+ }
+ }
+ GROUP "HDFEOS INFORMATION" {
+ DATASET "StructMetadata.0" {
+ DATATYPE H5T_STRING {
+ STRSIZE 32000;
+ STRPAD H5T_STR_NULLTERM;
+ CSET H5T_CSET_ASCII;
+ CTYPE H5T_C_S1;
+ }
+ DATASPACE SCALAR
+ }
+ }
+}
+}
+\endcode
+
+\subsection subsecViewToolsViewContent_h5ls h5ls
+The h5ls tool by default just displays the objects in the root group. It will not display
+items in groups beneath the root group unless specified. Useful h5ls options for viewing
+file content and structure are:
+<table>
+<tr>
+<th>Option</th>
+<th>Description</th>
+<th>Comment</th>
+</tr>
+<tr>
+<td>-r
+</td>
+<td>Lists all groups and objects recursively
+</td>
+<td>See @ref subsubsecViewToolsViewContent_h5lsEx3
+</td>
+</tr>
+<tr>
+<td>-v
+</td>
+<td>Generates verbose output (lists dataset properties, attributes
+and attribute values, but no dataset values)
+</td>
+<td>
+</td>
+</tr>
+</table>
+
+\subsubsection subsubsecViewToolsViewContent_h5lsEx3 Example 3
+The following command shows the contents of the HDF-EOS5 file OMI-Aura.he5. The output is similar to h5dump, except that h5ls also shows dataspace information for each dataset:
+\code
+h5ls -r OMI-Aura.he5
+\endcode
+
+The output is shown below:
+\code
+/ Group
+/HDFEOS Group
+/HDFEOS/ADDITIONAL Group
+/HDFEOS/ADDITIONAL/FILE_ATTRIBUTES Group
+/HDFEOS/GRIDS Group
+/HDFEOS/GRIDS/OMI\ Column\ Amount\ O3 Group
+/HDFEOS/GRIDS/OMI\ Column\ Amount\ O3/Data\ Fields Group
+/HDFEOS/GRIDS/OMI\ Column\ Amount\ O3/Data\ Fields/ColumnAmountO3 Dataset {720, 1440}
+/HDFEOS/GRIDS/OMI\ Column\ Amount\ O3/Data\ Fields/RadiativeCloudFraction Dataset {720, 1440}
+/HDFEOS/GRIDS/OMI\ Column\ Amount\ O3/Data\ Fields/SolarZenithAngle Dataset {720, 1440}
+/HDFEOS/GRIDS/OMI\ Column\ Amount\ O3/Data\ Fields/ViewingZenithAngle Dataset {720, 1440}
+/HDFEOS\ INFORMATION Group
+/HDFEOS\ INFORMATION/StructMetadata.0 Dataset {SCALAR}
+\endcode
+
+\section secViewToolsViewDset Datasets and Dataset Properties
+Both h5dump and h5ls can be used to view specific datasets.
+<ul>
+<li>\ref subsecViewToolsViewDset_h5dump</li>
+<li>\ref subsecViewToolsViewDset_h5ls</li>
+</ul>
+
+\subsection subsecViewToolsViewDset_h5dump h5dump
+Useful h5dump options for examining specific datasets include:
+<table>
+<tr>
+<th>Option</th>
+<th>Description</th>
+<th>Comment</th>
+</tr>
+<tr>
+<td>-d D, --dataset=D
+</td>
+<td>Displays dataset D
+</td>
+<td>See @ref subsubsecViewToolsViewDset_h5dumpEx4
+</td>
+</tr>
+<tr>
+<td> -H, --header
+</td>
+<td>Displays header information only
+</td>
+<td>See @ref subsubsecViewToolsViewDset_h5dumpEx4
+</td>
+</tr>
+<tr>
+<td>-p, --properties
+</td>
+<td>Displays dataset filters, storage layout, and fill value properties
+</td>
+<td>See @ref subsubsecViewToolsViewDset_h5dumpEx5
+</td>
+</tr>
+<tr>
+<td>-A 0, --onlyattr=0
+</td>
+<td>Suppresses the display of attributes
+</td>
+<td>See @ref subsubsecViewToolsViewContent_h5dumpEx2
+</td>
+</tr>
+<tr>
+<td>-N P, --any_path=P
+</td>
+<td>Displays any object or attribute that matches path P
+</td>
+<td>See @ref subsubsecViewToolsViewAttr_h5dumpEx6
+</td>
+</tr>
+</table>
+
+\subsubsection subsubsecViewToolsViewDset_h5dumpEx4 Example 4
+A specific dataset can be viewed with <code style="background-color:whitesmoke;">h5dump</code> using the <code style="background-color:whitesmoke;">-d D</code> option and specifying the entire
+path and name of the dataset for <code style="background-color:whitesmoke;">D</code>. The path is important in identifying the correct dataset,
+as there can be multiple datasets with the same name. The path can be determined by looking at
+the objects in the file with <code style="background-color:whitesmoke;">h5dump -n</code>.
+
+The following example uses the <code style="background-color:whitesmoke;">groups.h5</code> file that is created by the
+\ref LBExamples
+example <code style="background-color:whitesmoke;">h5_crtgrpar.c</code>. To display <code style="background-color:whitesmoke;">dset1</code> in the <code style="background-color:whitesmoke;">groups.h5</code> file below, specify dataset
+<code style="background-color:whitesmoke;">/MyGroup/dset1</code>. The <code style="background-color:whitesmoke;">-H</code> option is used to suppress printing of the data values:
+
+<em>Contents of groups.h5</em>
+\code
+ $ h5dump -n groups.h5
+ HDF5 "groups.h5" {
+ FILE_CONTENTS {
+ group /
+ group /MyGroup
+ group /MyGroup/Group_A
+ dataset /MyGroup/Group_A/dset2
+ group /MyGroup/Group_B
+ dataset /MyGroup/dset1
+ }
+ }
+\endcode
+
+<em>Display dataset "dset1"</em>
+\code
+ $ h5dump -d "/MyGroup/dset1" -H groups.h5
+ HDF5 "groups.h5" {
+ DATASET "/MyGroup/dset1" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 3, 3 ) / ( 3, 3 ) }
+ }
+ }
+\endcode
+
+\subsubsection subsubsecViewToolsViewDset_h5dumpEx5 Example 5
+The <code style="background-color:whitesmoke;">-p</code> option is used to examine the dataset filters, storage layout, and fill value properties of a dataset.
+
+This option can be useful for checking how well compression works, or even for analyzing performance
+and dataset size issues related to chunking. (The smaller the chunk size, the more chunks that HDF5
+has to keep track of, which increases the size of the file and potentially affects performance.)
+
+In the file shown below the dataset <code style="background-color:whitesmoke;">/DS1</code> is both chunked and compressed:
+\code
+ $ h5dump -H -p -d "/DS1" h5ex_d_gzip.h5
+ HDF5 "h5ex_d_gzip.h5" {
+ DATASET "/DS1" {
+ DATATYPE H5T_STD_I32LE
+ DATASPACE SIMPLE { ( 32, 64 ) / ( 32, 64 ) }
+ STORAGE_LAYOUT {
+ CHUNKED ( 4, 8 )
+ SIZE 5278 (1.552:1 COMPRESSION)
+ }
+ FILTERS {
+ COMPRESSION DEFLATE { LEVEL 9 }
+ }
+ FILLVALUE {
+ FILL_TIME H5D_FILL_TIME_IFSET
+ VALUE 0
+ }
+ ALLOCATION_TIME {
+ H5D_ALLOC_TIME_INCR
+ }
+ }
+ }
+\endcode
+
+You can obtain the <code style="background-color:whitesmoke;">h5ex_d_gzip.c</code> program that created this file, as well as the file created,
+from the <a href="https://portal.hdfgroup.org/display/HDF5/Examples+by+API">Examples by API</a> page.
+
+\subsection subsecViewToolsViewDset_h5ls h5ls
+Specific datasets can be specified with <code style="background-color:whitesmoke;">h5ls</code> by simply adding the dataset path and dataset after the
+file name. As an example, this command displays dataset <code style="background-color:whitesmoke;">dset2</code> in the <code style="background-color:whitesmoke;">groups.h5</code>
+file used in @ref subsubsecViewToolsViewDset_h5dumpEx4 :
+\code
+h5ls groups.h5/MyGroup/Group_A/dset2
+\endcode
+
+Just the dataspace information gets displayed:
+\code
+dset2 Dataset {2, 10}
+\endcode
+
+The following options can be used to see detailed information about a dataset.
+<table>
+<tr>
+<th>Option</th>
+<th>Description</th>
+</tr>
+<tr>
+<td>-v, --verbose
+</td>
+<td>Generates verbose output (lists dataset properties, attributes
+and attribute values, but no dataset values)
+</td>
+</tr>
+<tr>
+<td>-d, --data
+</td>
+<td>Displays dataset values
+</td>
+</tr>
+</table>
+
+The output of using <code style="background-color:whitesmoke;">-v</code> is shown below:
+\code
+ $ h5ls -v groups.h5/MyGroup/Group_A/dset2
+ Opened "groups.h5" with sec2 driver.
+ dset2 Dataset {2/2, 10/10}
+ Location: 1:3840
+ Links: 1
+ Storage: 80 logical bytes, 80 allocated bytes, 100.00% utilization
+ Type: 32-bit big-endian integer
+\endcode
+
+The output of using <code style="background-color:whitesmoke;">-d</code> is shown below:
+\code
+ $ h5ls -d groups.h5/MyGroup/Group_A/dset2
+ dset2 Dataset {2, 10}
+ Data:
+ (0,0) 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10
+\endcode
+
+\section secViewToolsViewGrps Groups
+Both h5dump and h5ls can be used to view specific groups in a file.
+<ul>
+<li>\ref subsecViewToolsViewGrps_h5dump</li>
+<li>\ref subsecViewToolsViewGrps_h5ls</li>
+</ul>
+
+\subsection subsecViewToolsViewGrps_h5dump h5dump
+The <code style="background-color:whitesmoke;">h5dump</code> options that are useful for examining groups are:
+<table>
+<tr>
+<th>Option</th>
+<th>Description</th>
+</tr>
+<tr>
+<td>-g G, --group=G
+</td>
+<td>Displays group G and its members
+</td>
+</tr>
+<tr>
+<td>-H, --header
+</td>
+<td>Displays header information only
+</td>
+</tr>
+<tr>
+<td>-A 0, --onlyattr=0
+</td>
+<td>Suppresses the display of attributes
+</td>
+</tr>
+</table>
+
+To view the contents of the <code style="background-color:whitesmoke;">HDFEOS</code> group in the OMI file mentioned previously, you can specify the path and name of the group as follows:
+\code
+h5dump -g "/HDFEOS" -H -A 0 OMI-Aura.he5
+\endcode
+
+The <code style="background-color:whitesmoke;">-A 0</code> option suppresses attributes and <code style="background-color:whitesmoke;">-H</code> suppresses printing of data values:
+\code
+ HDF5 "OMI-Aura.he5" {
+ GROUP "/HDFEOS" {
+ GROUP "ADDITIONAL" {
+ GROUP "FILE_ATTRIBUTES" {
+ }
+ }
+ GROUP "GRIDS" {
+ GROUP "OMI Column Amount O3" {
+ GROUP "Data Fields" {
+ DATASET "ColumnAmountO3" {
+ DATATYPE H5T_IEEE_F32LE
+ DATASPACE SIMPLE { ( 720, 1440 ) / ( 720, 1440 ) }
+ }
+ DATASET "RadiativeCloudFraction" {
+ DATATYPE H5T_IEEE_F32LE
+ DATASPACE SIMPLE { ( 720, 1440 ) / ( 720, 1440 ) }
+ }
+ DATASET "SolarZenithAngle" {
+ DATATYPE H5T_IEEE_F32LE
+ DATASPACE SIMPLE { ( 720, 1440 ) / ( 720, 1440 ) }
+ }
+ DATASET "ViewingZenithAngle" {
+ DATATYPE H5T_IEEE_F32LE
+ DATASPACE SIMPLE { ( 720, 1440 ) / ( 720, 1440 ) }
+ }
+ }
+ }
+ }
+ }
+ }
+\endcode
+
+\subsection subsecViewToolsViewGrps_h5ls h5ls
+You can view the contents of a group with <code style="background-color:whitesmoke;">h5ls</code> by specifying the group after the file name.
+To use <code style="background-color:whitesmoke;">h5ls</code> to view the contents of the <code style="background-color:whitesmoke;">/HDFEOS</code> group in the <code style="background-color:whitesmoke;">OMI-Aura.he5</code> file, type:
+\code
+h5ls -r OMI-Aura.he5/HDFEOS
+\endcode
+
+The output of this command is:
+\code
+ /ADDITIONAL Group
+ /ADDITIONAL/FILE_ATTRIBUTES Group
+ /GRIDS Group
+ /GRIDS/OMI\ Column\ Amount\ O3 Group
+ /GRIDS/OMI\ Column\ Amount\ O3/Data\ Fields Group
+ /GRIDS/OMI\ Column\ Amount\ O3/Data\ Fields/ColumnAmountO3 Dataset {720, 1440}
+ /GRIDS/OMI\ Column\ Amount\ O3/Data\ Fields/RadiativeCloudFraction Dataset {720, 1440}
+ /GRIDS/OMI\ Column\ Amount\ O3/Data\ Fields/SolarZenithAngle Dataset {720, 1440}
+ /GRIDS/OMI\ Column\ Amount\ O3/Data\ Fields/ViewingZenithAngle Dataset {720, 1440}
+\endcode
+
+If you specify the <code style="background-color:whitesmoke;">-v</code> option, you can also see the attributes and properties of the datasets.
+
+\section secViewToolsViewAttr Attributes
+
+\subsection subsecViewToolsViewAttr_h5dump h5dump
+Attributes are displayed by default if using <code style="background-color:whitesmoke;">h5dump</code>. Some files contain many attributes, which
+can make it difficult to examine the objects in the file. Shown below are options that can help
+when using <code style="background-color:whitesmoke;">h5dump</code> to work with files that have attributes.
+
+\subsubsection subsubsecViewToolsViewAttr_h5dumpEx6 Example 6
+The <code style="background-color:whitesmoke;">-a A</code> option will display an attribute. However, the path to the attribute must be included
+when specifying this option. For example, to see the <code style="background-color:whitesmoke;">ScaleFactor</code> attribute in the <code style="background-color:whitesmoke;">OMI-Aura.he5</code> file, type:
+\code
+h5dump -a "/HDFEOS/GRIDS/OMI Column Amount O3/Data Fields/SolarZenithAngle/ScaleFactor" OMI-Aura.he5
+\endcode
+
+This command displays:
+\code
+ HDF5 "OMI-Aura.he5" {
+ ATTRIBUTE "ScaleFactor" {
+ DATATYPE H5T_IEEE_F64LE
+ DATASPACE SIMPLE { ( 1 ) / ( 1 ) }
+ DATA {
+ (0): 1
+ }
+ }
+ }
+\endcode
+
+How can you determine the path to the attribute? This can be done by looking at the file contents with the <code style="background-color:whitesmoke;">-n 1</code> option:
+\code
+h5dump -n 1 OMI-Aura.he5
+\endcode
+
+Below is a portion of the output for this command:
+\code
+ HDF5 "OMI-Aura.he5" {
+ FILE_CONTENTS {
+ group /
+ group /HDFEOS
+ group /HDFEOS/ADDITIONAL
+ group /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES
+ attribute /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES/EndUTC
+ attribute /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES/GranuleDay
+ attribute /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES/GranuleDayOfYear
+ attribute /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES/GranuleMonth
+ attribute /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES/GranuleYear
+ attribute /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES/InstrumentName
+ attribute /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES/OrbitNumber
+ attribute /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES/OrbitPeriod
+ attribute /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES/PGEVersion
+ attribute /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES/Period
+ attribute /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES/ProcessLevel
+ attribute /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES/StartUTC
+ attribute /HDFEOS/ADDITIONAL/FILE_ATTRIBUTES/TAI93At0zOfGranule
+
+ ...
+\endcode
+
+There can be multiple objects or attributes with the same name in a file. How can you make sure
+you are finding the correct object or attribute? You can first determine how many attributes
+there are with a specified name, and then examine the paths to them.
+
+The <code style="background-color:whitesmoke;">-N</code> option can be used to display all objects or attributes with a given name.
+For example, there are four attributes with the name <code style="background-color:whitesmoke;">ScaleFactor</code> in the <code style="background-color:whitesmoke;">OMI-Aura.he5</code> file,
+as can be seen below with the <code style="background-color:whitesmoke;">-N</code> option:
+\code
+h5dump -N ScaleFactor OMI-Aura.he5
+\endcode
+
+It outputs:
+\code
+HDF5 "OMI-Aura.he5" {
+ATTRIBUTE "ScaleFactor" {
+ DATATYPE H5T_IEEE_F64LE
+ DATASPACE SIMPLE { ( 1 ) / ( 1 ) }
+ DATA {
+ (0): 1
+ }
+}
+ATTRIBUTE "ScaleFactor" {
+ DATATYPE H5T_IEEE_F64LE
+ DATASPACE SIMPLE { ( 1 ) / ( 1 ) }
+ DATA {
+ (0): 1
+ }
+}
+ATTRIBUTE "ScaleFactor" {
+ DATATYPE H5T_IEEE_F64LE
+ DATASPACE SIMPLE { ( 1 ) / ( 1 ) }
+ DATA {
+ (0): 1
+ }
+}
+ATTRIBUTE "ScaleFactor" {
+ DATATYPE H5T_IEEE_F64LE
+ DATASPACE SIMPLE { ( 1 ) / ( 1 ) }
+ DATA {
+ (0): 1
+ }
+}
+}
+\endcode
+
+\subsection subsecViewToolsViewAttr_h5ls h5ls
+If you include the <code style="background-color:whitesmoke;">-v</code> (verbose) option for <code style="background-color:whitesmoke;">h5ls</code>, you will see all of the attributes for the
+specified file, dataset or group. You cannot display individual attributes.
+
+\section secViewToolsViewSub Dataset Subset
+
+\subsection subsecViewToolsViewSub_h5dump h5dump
+If you have a very large dataset, you may wish to subset or see just a portion of the dataset.
+This can be done with the following <code style="background-color:whitesmoke;">h5dump</code> options.
+<table>
+<tr>
+<th>Option</th>
+<th>Description</th>
+</tr>
+<tr>
+<td>-d D, --dataset=D
+</td>
+<td>Dataset D
+</td>
+</tr>
+<tr>
+<td>-s START, --start=START
+</td>
+<td>Offset or start of subsetting selection
+</td>
+</tr>
+<tr>
+<td>-S STRIDE, --stride=STRIDE
+</td>
+<td>Stride (sampling along a dimension). The default (unspecified, or 1) selects
+every element along a dimension, a value of 2 selects every other element,
+a value of 3 selects every third element, ...
+</td>
+</tr>
+<tr>
+<td>-c COUNT, --count=COUNT
+</td>
+<td>Number of blocks to include in the selection
+</td>
+</tr>
+<tr>
+<td>-k BLOCK, --block=BLOCK
+</td>
+<td>Size of the block in a hyperslab. The default (unspecified, or 1) is for
+the block size to be the size of a single element.
+</td>
+</tr>
+</table>
+
+The <code style="background-color:whitesmoke;">START (s)</code>, <code style="background-color:whitesmoke;">STRIDE (S)</code>, <code style="background-color:whitesmoke;">COUNT (c)</code>, and <code style="background-color:whitesmoke;">BLOCK (k)</code> options
+define the shape and size of the selection. They are arrays with the same number of dimensions as the rank
+of the dataset's dataspace, and they all work together to define the selection. A change to one of
+these arrays can affect the others.
+
+When specifying these h5dump options, a comma is used as the delimiter for each dimension in the
+option value. For example, with a 2-dimensional dataset, the option value is specified as "H,W",
+where H is the height and W is the width. If the offset is 0 for both dimensions, then
+<code style="background-color:whitesmoke;">START</code> would be specified as follows:
+\code
+-s "0,0"
+\endcode
+
+There is also a shorthand way to specify these options with brackets at the end of the dataset name:
+\code
+-d DATASETNAME[s;S;c;k]
+\endcode
+
+Multiple dimensions are separated by commas. For example, a subset for a 2-dimensional dataset would be specified as follows:
+\code
+-d DATASETNAME[s,s;S,S;c,c;k,k]
+\endcode
+
+For a detailed understanding of how selections works, see the #H5Sselect_hyperslab API in the \ref RM.
+
+The dataset SolarZenithAngle in the OMI-Aura.he5 file can be used to illustrate these options. This
+dataset is a 2-dimensional dataset of size 720 (height) x 1440 (width). Too much data will be displayed
+by simply viewing the specified dataset with the <code style="background-color:whitesmoke;">-d</code> option:
+\code
+h5dump -d "HDFEOS/GRIDS/OMI Column Amount O3/Data Fields/SolarZenithAngle" OMI-Aura.he5
+\endcode
+Subsetting narrows down the output that is displayed. In the following example, the first
+15x10 elements (-c "15,10") are specified, beginning with position (0,0) (-s "0,0"):
+\code
+ h5dump -A 0 -d "HDFEOS/GRIDS/OMI Column Amount O3/Data Fields/SolarZenithAngle" -s "0,0" -c "15,10" -w 0 OMI-Aura.he5
+\endcode
+
+If using the shorthand method, specify:
+\code
+ h5dump -A 0 -d "HDFEOS/GRIDS/OMI Column Amount O3/Data Fields/SolarZenithAngle[0,0;;15,10;]" -w 0 OMI-Aura.he5
+\endcode
+
+Note that the <code style="background-color:whitesmoke;">-d</code> option must be specified
+before the subsetting options (if not using the shorthand method).
+
+The <code style="background-color:whitesmoke;">-A 0</code> option suppresses the printing of attributes.
+
+The <code style="background-color:whitesmoke;">-w 0</code> option sets the number of columns of output to the maximum allowed value (65535).
+This ensures that there are enough columns specified for displaying the data.
+
+Either command displays:
+\code
+ HDF5 "OMI-Aura.he5" {
+ DATASET "HDFEOS/GRIDS/OMI Column Amount O3/Data Fields/SolarZenithAngle" {
+ DATATYPE H5T_IEEE_F32LE
+ DATASPACE SIMPLE { ( 720, 1440 ) / ( 720, 1440 ) }
+ SUBSET {
+ START ( 0, 0 );
+ STRIDE ( 1, 1 );
+ COUNT ( 15, 10 );
+ BLOCK ( 1, 1 );
+ DATA {
+ (0,0): 79.403, 79.403, 79.403, 79.403, 79.403, 79.403, 79.403, 79.403, 79.403, 79.403,
+ (1,0): 79.071, 79.071, 79.071, 79.071, 79.071, 79.071, 79.071, 79.071, 79.071, 79.071,
+ (2,0): 78.867, 78.867, 78.867, 78.867, 78.867, 78.867, 78.867, 78.867, 78.867, 78.867,
+ (3,0): 78.632, 78.632, 78.632, 78.632, 78.632, 78.632, 78.632, 78.632, 78.632, 78.632,
+ (4,0): 78.429, 78.429, 78.429, 78.429, 78.429, 78.429, 78.429, 78.429, 78.429, 78.429,
+ (5,0): 78.225, 78.225, 78.225, 78.225, 78.225, 78.225, 78.225, 78.225, 78.225, 78.225,
+ (6,0): 78.021, 78.021, 78.021, 78.021, 78.021, 78.021, 78.021, 78.021, 78.021, 78.021,
+ (7,0): 77.715, 77.715, 77.715, 77.715, 77.715, 77.715, 77.715, 77.715, 77.715, 77.715,
+ (8,0): 77.511, 77.511, 77.511, 77.511, 77.511, 77.511, 77.511, 77.511, 77.511, 77.511,
+ (9,0): 77.658, 77.658, 77.658, 77.307, 77.307, 77.307, 77.307, 77.307, 77.307, 77.307,
+ (10,0): 77.556, 77.556, 77.556, 77.556, 77.556, 77.556, 77.556, 77.556, 77.102, 77.102,
+ (11,0): 78.408, 78.408, 78.408, 78.408, 78.408, 78.408, 78.408, 78.408, 77.102, 77.102,
+ (12,0): 76.34, 78.413, 78.413, 78.413, 78.413, 78.413, 78.413, 78.413, 78.413, 78.413,
+ (13,0): 78.107, 78.107, 78.107, 78.107, 78.107, 78.107, 78.107, 78.107, 78.107, 77.195,
+ (14,0): 78.005, 78.005, 78.005, 78.005, 78.005, 78.005, 76.991, 76.991, 76.991, 76.991
+ }
+ }
+ }
+ }
+\endcode
+
+What if we wish to read three rows of three elements at a time (-c "3,3"), where each element
+is a 2 x 3 block (-k "2,3") and we wish to begin reading from the second row (-s "1,0")?
+
+You can do that with the following command:
+\code
+ h5dump -A 0 -d "HDFEOS/GRIDS/OMI Column Amount O3/Data Fields/SolarZenithAngle"
+ -s "1,0" -S "2,3" -c "3,3" -k "2,3" -w 0 OMI-Aura.he5
+\endcode
+
+In this case, the stride must be specified as 2 by 3 (or larger) to accommodate the reading of 2 by 3 blocks.
+If it is smaller, the command will fail with the error,
+\code
+h5dump error: wrong subset selection; blocks overlap.
+\endcode
+
+The output of the above command is shown below:
+\code
+ HDF5 "OMI-Aura.he5" {
+ DATASET "HDFEOS/GRIDS/OMI Column Amount O3/Data Fields/SolarZenithAngle" {
+ DATATYPE H5T_IEEE_F32LE
+ DATASPACE SIMPLE { ( 720, 1440 ) / ( 720, 1440 ) }
+ SUBSET {
+ START ( 1, 0 );
+ STRIDE ( 2, 3 );
+ COUNT ( 3, 3 );
+ BLOCK ( 2, 3 );
+ DATA {
+ (1,0): 79.071, 79.071, 79.071, 79.071, 79.071, 79.071, 79.071, 79.071, 79.071,
+ (2,0): 78.867, 78.867, 78.867, 78.867, 78.867, 78.867, 78.867, 78.867, 78.867,
+ (3,0): 78.632, 78.632, 78.632, 78.632, 78.632, 78.632, 78.632, 78.632, 78.632,
+ (4,0): 78.429, 78.429, 78.429, 78.429, 78.429, 78.429, 78.429, 78.429, 78.429,
+ (5,0): 78.225, 78.225, 78.225, 78.225, 78.225, 78.225, 78.225, 78.225, 78.225,
+ (6,0): 78.021, 78.021, 78.021, 78.021, 78.021, 78.021, 78.021, 78.021, 78.021
+ }
+ }
+ }
+ }
+\endcode
+
+\section secViewToolsViewDtypes Datatypes
+
+\subsection subsecViewToolsViewDtypes_h5dump h5dump
+The following datatypes are discussed, using the output of <code style="background-color:whitesmoke;">h5dump</code> with HDF5 files from the
+<a href="https://portal.hdfgroup.org/display/HDF5/Examples+by+API">Examples by API</a> page:
+<ul>
+<li>@ref subsubsecViewToolsViewDtypes_array</li>
+<li>@ref subsubsecViewToolsViewDtypes_objref</li>
+<li>@ref subsubsecViewToolsViewDtypes_regref</li>
+<li>@ref subsubsecViewToolsViewDtypes_string</li>
+</ul>
+
+\subsubsection subsubsecViewToolsViewDtypes_array Array
+Users have been confused by the difference between an Array datatype (#H5T_ARRAY) and a dataset that
+(has a dataspace that) is an array.
+
+Typically, these users want a dataset that has a simple datatype (like integer or float) that is an
+array, like the following dataset <code style="background-color:whitesmoke;">/DS1</code>. It has a datatype of #H5T_STD_I32LE (32-bit Little-Endian Integer)
+and is a 4 by 7 array:
+\code
+$ h5dump h5ex_d_rdwr.h5
+HDF5 "h5ex_d_rdwr.h5" {
+GROUP "/" {
+ DATASET "DS1" {
+ DATATYPE H5T_STD_I32LE
+ DATASPACE SIMPLE { ( 4, 7 ) / ( 4, 7 ) }
+ DATA {
+ (0,0): 0, -1, -2, -3, -4, -5, -6,
+ (1,0): 0, 0, 0, 0, 0, 0, 0,
+ (2,0): 0, 1, 2, 3, 4, 5, 6,
+ (3,0): 0, 2, 4, 6, 8, 10, 12
+ }
+ }
+}
+}
+\endcode
+
+Contrast that with the following dataset that has both an Array datatype and is an array:
+\code
+$ h5dump h5ex_t_array.h5
+HDF5 "h5ex_t_array.h5" {
+GROUP "/" {
+ DATASET "DS1" {
+ DATATYPE H5T_ARRAY { [3][5] H5T_STD_I64LE }
+ DATASPACE SIMPLE { ( 4 ) / ( 4 ) }
+ DATA {
+ (0): [ 0, 0, 0, 0, 0,
+ 0, -1, -2, -3, -4,
+ 0, -2, -4, -6, -8 ],
+ (1): [ 0, 1, 2, 3, 4,
+ 1, 1, 1, 1, 1,
+ 2, 1, 0, -1, -2 ],
+ (2): [ 0, 2, 4, 6, 8,
+ 2, 3, 4, 5, 6,
+ 4, 4, 4, 4, 4 ],
+ (3): [ 0, 3, 6, 9, 12,
+ 3, 5, 7, 9, 11,
+ 6, 7, 8, 9, 10 ]
+ }
+ }
+}
+}
+\endcode
+
+In this file, dataset <code style="background-color:whitesmoke;">/DS1</code> has a datatype of
+\code
+H5T_ARRAY { [3][5] H5T_STD_I64LE }
+\endcode
+and it also has a dataspace of
+\code
+SIMPLE { ( 4 ) / ( 4 ) }
+\endcode
+In other words, it is an array of four elements, in which each element is a 3 by 5 array of #H5T_STD_I64LE.
+
+This dataset is much more complex. Also note that subsetting cannot be done on Array datatypes.
+
+See this <a href="https://portal.hdfgroup.org/display/knowledge/H5T_ARRAY+Datatype">FAQ</a> for more information on the Array datatype.
+
+\subsubsection subsubsecViewToolsViewDtypes_objref Object Reference
+An Object Reference is a reference to an entire object (dataset, group, or named datatype).
+A dataset with an Object Reference datatype consists of one or more Object References.
+An Object Reference dataset can be used as an index to an HDF5 file.
+
+The <code style="background-color:whitesmoke;">/DS1</code> dataset in the following file (<code style="background-color:whitesmoke;">h5ex_t_objref.h5</code>) is an Object Reference dataset.
+It contains two references, one to group <code style="background-color:whitesmoke;">/G1</code> and the other to dataset <code style="background-color:whitesmoke;">/DS2</code>:
+\code
+$ h5dump h5ex_t_objref.h5
+HDF5 "h5ex_t_objref.h5" {
+GROUP "/" {
+ DATASET "DS1" {
+ DATATYPE H5T_REFERENCE { H5T_STD_REF_OBJECT }
+ DATASPACE SIMPLE { ( 2 ) / ( 2 ) }
+ DATA {
+ (0): GROUP 1400 /G1 , DATASET 800 /DS2
+ }
+ }
+ DATASET "DS2" {
+ DATATYPE H5T_STD_I32LE
+ DATASPACE NULL
+ DATA {
+ }
+ }
+ GROUP "G1" {
+ }
+}
+}
+\endcode
+
+\subsubsection subsubsecViewToolsViewDtypes_regref Region Reference
+A Region Reference is a reference to a selection within a dataset. A selection can be either
+individual elements or a hyperslab. In <code style="background-color:whitesmoke;">h5dump</code> you will see the name of the dataset along with
+the elements or slab that is selected. A dataset with a Region Reference datatype consists of
+one or more Region References.
+
+An example of a Region Reference dataset (<code style="background-color:whitesmoke;">h5ex_t_regref.h5</code>) can be found on the
+<a href="https://portal.hdfgroup.org/display/HDF5/Examples+by+API">Examples by API</a> page,
+under Datatypes. If you examine this dataset with <code style="background-color:whitesmoke;">h5dump</code> you will see that <code style="background-color:whitesmoke;">/DS1</code> is a
+Region Reference dataset, as indicated by its datatype, shown below:
+\code
+$ h5dump h5ex_t_regref.h5
+HDF5 "h5ex_t_regref.h5" {
+GROUP "/" {
+ DATASET "DS1" {
+ DATATYPE H5T_REFERENCE { H5T_STD_REF_DSETREG }
+ DATASPACE SIMPLE { ( 2 ) / ( 2 ) }
+ DATA {
+ DATASET /DS2 {(0,1), (2,11), (1,0), (2,4)},
+ DATASET /DS2 {(0,0)-(0,2), (0,11)-(0,13), (2,0)-(2,2), (2,11)-(2,13)}
+ }
+ }
+ DATASET "DS2" {
+ DATATYPE H5T_STD_I8LE
+ DATASPACE SIMPLE { ( 3, 16 ) / ( 3, 16 ) }
+ DATA {
+ (0,0): 84, 104, 101, 32, 113, 117, 105, 99, 107, 32, 98, 114, 111, 119,
+ (0,14): 110, 0,
+ (1,0): 102, 111, 120, 32, 106, 117, 109, 112, 115, 32, 111, 118, 101,
+ (1,13): 114, 32, 0,
+ (2,0): 116, 104, 101, 32, 53, 32, 108, 97, 122, 121, 32, 100, 111, 103,
+ (2,14): 115, 0
+ }
+ }
+}
+}
+\endcode
+
+It contains two Region References:
+\li A selection of four individual elements in dataset <code style="background-color:whitesmoke;">/DS2 : (0,1), (2,11), (1,0), (2,4)</code>
+See the #H5Sselect_elements API in the \ref UG for information on selecting individual elements.
+\li A selection of these blocks in dataset <code style="background-color:whitesmoke;">/DS2 : (0,0)-(0,2), (0,11)-(0,13), (2,0)-(2,2), (2,11)-(2,13)</code>
+See the #H5Sselect_hyperslab API in the \ref UG for how to do hyperslab selection.
+
+
+If you look at the code that creates the dataset (<code style="background-color:whitesmoke;">h5ex_t_regref.c</code>) you will see that the
+first reference is created with these calls:
+\code
+ status = H5Sselect_elements (space, H5S_SELECT_SET, 4, coords[0]);
+ status = H5Rcreate (&wdata[0], file, DATASET2, H5R_DATASET_REGION, space);
+\endcode
+
+where the buffer containing the coordinates to select is:
+\code
+ coords[4][2] = { {0, 1},
+ {2, 11},
+ {1, 0},
+ {2, 4} },
+\endcode
+
+The second reference is created by calling,
+\code
+ status = H5Sselect_hyperslab (space, H5S_SELECT_SET, start, stride, count, block);
+ status = H5Rcreate (&wdata[1], file, DATASET2, H5R_DATASET_REGION, space);
+\endcode
+where start, stride, count, and block have these values:
+\code
+ start[2] = {0, 0},
+ stride[2] = {2, 11},
+ count[2] = {2, 2},
+ block[2] = {1, 3};
+\endcode
+
+These start, stride, count, and block values will select the following elements from the dataset (the first three, and last three, values of the first and third rows):
+\code
+84 104 101 32 113 117 105 99 107 32 98 114 111 119 110 0
+102 111 120 32 106 117 109 112 115 32 111 118 101 114 32 0
+116 104 101 32 53 32 108 97 122 121 32 100 111 103 115 0
+\endcode
+
+If you use <code style="background-color:whitesmoke;">h5dump</code> to select a subset of dataset
+<code style="background-color:whitesmoke;">/DS2</code> with these start, stride, count, and block values, you will see that the same elements are selected:
+\code
+$ h5dump -d "/DS2" -s "0,0" -S "2,11" -c "2,2" -k "1,3" h5ex_t_regref.h5
+HDF5 "h5ex_t_regref.h5" {
+DATASET "/DS2" {
+ DATATYPE H5T_STD_I8LE
+ DATASPACE SIMPLE { ( 3, 16 ) / ( 3, 16 ) }
+ SUBSET {
+ START ( 0, 0 );
+ STRIDE ( 2, 11 );
+ COUNT ( 2, 2 );
+ BLOCK ( 1, 3 );
+ DATA {
+ (0,0): 84, 104, 101, 114, 111, 119,
+ (2,0): 116, 104, 101, 100, 111, 103
+ }
+ }
+}
+}
+\endcode
+
+For more information on selections, see the tutorial topic on
+@ref LBDsetSubRW. Also see the
+\ref secViewToolsViewSub tutorial topic on using <code style="background-color:whitesmoke;">h5dump</code> to view a subset.
+
+\subsubsection subsubsecViewToolsViewDtypes_string String
+There are two types of string data, fixed length strings and variable length strings.
+
+Below is the <code style="background-color:whitesmoke;">h5dump</code> output for two files that have the same strings written to them. In one file,
+the strings are fixed in length, and in the other, the strings have different sizes (and are variable in size).
+
+<em>Dataset of Fixed Length Strings</em>
+\code
+HDF5 "h5ex_t_string.h5" {
+GROUP "/" {
+ DATASET "DS1" {
+ DATATYPE H5T_STRING {
+ STRSIZE 7;
+ STRPAD H5T_STR_SPACEPAD;
+ CSET H5T_CSET_ASCII;
+ CTYPE H5T_C_S1;
+ }
+ DATASPACE SIMPLE { ( 4 ) / ( 4 ) }
+ DATA {
+ (0): "Parting", "is such", "sweet ", "sorrow."
+ }
+ }
+}
+}
+\endcode
+
+<em>Dataset of Variable Length Strings</em>
+\code
+HDF5 "h5ex_t_vlstring.h5" {
+GROUP "/" {
+ DATASET "DS1" {
+ DATATYPE H5T_STRING {
+ STRSIZE H5T_VARIABLE;
+ STRPAD H5T_STR_SPACEPAD;
+ CSET H5T_CSET_ASCII;
+ CTYPE H5T_C_S1;
+ }
+ DATASPACE SIMPLE { ( 4 ) / ( 4 ) }
+ DATA {
+ (0): "Parting", "is such", "sweet", "sorrow."
+ }
+ }
+}
+}
+\endcode
+
+You might wonder which to use. Some comments to consider are included below.
+\li In general, a variable length string dataset is more complex than a fixed length string. If you don't
+specifically need a variable length type, then just use the fixed length string.
+\li A variable length dataset consists of pointers to heaps in different locations in the file. For this
+reason, a variable length dataset cannot be compressed. (Basically, the pointers get compressed and
+not the actual data!) If compression is needed, then do not use variable length types.
+\li If you need to create an array of different length strings, you can either use fixed length strings
+along with compression, or use a variable length string.
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref ViewToolsCommand
+
+*/
diff --git a/doxygen/dox/ViewTools2.dox b/doxygen/dox/ViewTools2.dox
new file mode 100644
index 0000000..4d8788a
--- /dev/null
+++ b/doxygen/dox/ViewTools2.dox
@@ -0,0 +1,786 @@
+/** @page ViewToolsEdit Command-line Tools For Editing HDF5 Files
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref ViewToolsCommand
+<hr>
+
+\section secViewToolsEditTOC Contents
+<ul>
+<li>\ref secViewToolsEditRemove</li>
+<li>\ref secViewToolsEditChange</li>
+<li>\ref secViewToolsEditApply</li>
+<li>\ref secViewToolsEditCopy</li>
+<li>\ref secViewToolsEditAdd</li>
+</ul>
+
+\section secViewToolsEditRemove Remove Inaccessible Objects and Unused Space in a File
+HDF5 files may accumulate unused space when they are read and rewritten to or if objects are deleted within
+them. With many edits and deletions this unused space can add up to a sizable amount.
+
+The <code style="background-color:whitesmoke;">h5repack</code> tool can be used to remove unused space in an HDF5
+file. If no options other than the input and output HDF5 files are specified on the
+<code style="background-color:whitesmoke;">h5repack</code> command line, it will write the file to the new
+file, getting rid of the unused space:
+\code
+h5repack <input file> <output file>
+\endcode
+
+\section secViewToolsEditChange Change a Dataset's Storage Layout
+The <code style="background-color:whitesmoke;">h5repack</code> utility can be used to change a dataset's storage
+layout. By default, the storage layout of a dataset is defined at creation time and it cannot be changed.
+However, with h5repack you can write an HDF5 file to a new file and change the layout for objects in the new file.
+
+The <code style="background-color:whitesmoke;">-l</code> option in <code style="background-color:whitesmoke;">h5repack</code>
+is used to change the layout for an object. The string following the <code style="background-color:whitesmoke;">-l</code>
+option defines the layout type and parameters for specified objects (or all objects):
+\code
+h5repack -l [list of objects:]<layout type>=<layout parameters> <input file> <output file>
+\endcode
+
+If no object is specified, then everything in the input file will be written to the output file with the specified
+layout type and parameters. If objects are specified then everything in the input file will be written to the
+output file as is, except for those specified objects. They will be written to the output file with the given
+layout type and parameters.
+
+Following is a description of the dataset layouts and the <code style="background-color:whitesmoke;">h5repack</code>
+options to use to change a dataset:
+<table>
+<tr>
+<th>Storage Layout</th><th>h5repack Option</th><th>Description</th>
+</tr>
+<tr>
+<td>Contiguous
+</td>
+<td>CONTI
+</td>
+<td>Data is stored physically together
+</td>
+</tr>
+<tr>
+<td>Chunked
+</td>
+<td>CHUNK=DIM[xDIM...xDIM]
+</td>
+<td>Data is stored in DIM[xDIM...xDIM] chunks
+</td>
+</tr>
+<tr>
+<td>Compact
+</td>
+<td>COMPA
+</td>
+<td>Data is stored in the header of the object (less I/O)
+</td>
+</tr>
+</table>
+
+If you type <code style="background-color:whitesmoke;">h5repack -h</code> on the command line, you will see
+a detailed usage statement with examples of modifying the layout.
+
+In the following example, the dataset <code style="background-color:whitesmoke;">/dset</code> in the file
+dset.h5 is contiguous, as shown by the <code style="background-color:whitesmoke;">h5dump -pH</code> command.
+The <code style="background-color:whitesmoke;">h5repack</code> utility writes dset.h5 to a new file, dsetrpk.h5,
+where the dataset <code style="background-color:whitesmoke;">dset</code> is chunked. This can be seen by examining
+the resulting dsetrpk.h5 file with <code style="background-color:whitesmoke;">h5dump</code>, as shown:
+\code
+$ h5dump -pH dset.h5
+HDF5 "dset.h5" {
+GROUP "/" {
+ DATASET "dset" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
+ STORAGE_LAYOUT {
+ CONTIGUOUS
+ SIZE 96
+ OFFSET 1400
+ }
+ FILTERS {
+ NONE
+ }
+ FILLVALUE {
+ FILL_TIME H5D_FILL_TIME_IFSET
+ VALUE 0
+ }
+ ALLOCATION_TIME {
+ H5D_ALLOC_TIME_LATE
+ }
+ }
+}
+}
+
+$ h5repack -l dset:CHUNK=4x6 dset.h5 dsetrpk.h5
+
+$ h5dump -pH dsetrpk.h5
+HDF5 "dsetrpk.h5" {
+GROUP "/" {
+ DATASET "dset" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
+ STORAGE_LAYOUT {
+ CHUNKED ( 4, 6 )
+ SIZE 96
+ }
+ FILTERS {
+ NONE
+ }
+ FILLVALUE {
+ FILL_TIME H5D_FILL_TIME_IFSET
+ VALUE 0
+ }
+ ALLOCATION_TIME {
+ H5D_ALLOC_TIME_INCR
+ }
+ }
+}
+}
+\endcode
+
+There can be many reasons that the storage layout needs to be changed for a dataset. For example,
+there may be a performance issue with a dataset due to a small chunk size.
+
+\section secViewToolsEditApply Apply Compression Filter to a Dataset
+The <code style="background-color:whitesmoke;">h5repack</code> utility can be used to compress or
+remove compression from a dataset in a file. By default, compression cannot be added to or removed
+from a dataset once it has been created. However, with <code style="background-color:whitesmoke;">h5repack</code>
+you can write a file to a new file and specify a compression filter to apply to a dataset or datasets in the new file.
+
+To apply a filter to an object in an HDF5 file, specify the <code style="background-color:whitesmoke;">-f</code> option,
+where the string following the <code style="background-color:whitesmoke;">-f</code> option defines the filter and
+its parameters (if there are any) to apply to a given object or objects:
+\code
+h5repack -f [list of objects:]<name of filter>=<filter parameters> <input file> <output file>
+\endcode
+
+If no objects are specified then everything in the input file will be written to the output file with
+the filter and parameters specified. If objects are specified, then everything in the input file will
+be written to the output file as is, except for the specified objects. They will be written to the
+output file with the filter and parameters specified.
+
+If you type <code style="background-color:whitesmoke;">h5repack --help</code> on the command line,
+you will see a detailed usage statement with examples of modifying a filter. There are actually
+numerous filters that you can apply to a dataset:
+<table>
+<tr>
+<th>Filter</th><th>Options</th>
+</tr>
+<tr>
+<td>GZIP compression (levels 1-9)
+<td>GZIP=&lt;deflation level&gt;
+</td>
+</tr>
+<tr>
+<td>SZIP compression
+<td>SZIP=<pixels per block,coding>
+</td>
+</tr>
+<tr>
+<td>Shuffle filter
+<td>SHUF
+</td>
+</tr>
+<tr>
+<td>Checksum filter
+<td>FLET
+</td>
+</tr>
+<tr>
+<td>NBIT compression
+<td>NBIT
+</td>
+</tr>
+<tr>
+<td>HDF5 Scale/Offset filter
+<td>SOFF=<scale_factor,scale_type>
+</td>
+</tr>
+<tr>
+<td>User defined filter
+<td>UD=<filter_number,cd_value_count,value_1[,value_2,...,value_N]>
+</td>
+</tr>
+<tr>
+<td>Remove ALL filters
+</td>
+<td>NONE
+</td>
+</tr>
+</table>
+
+Be aware that a dataset must be chunked to apply compression to it. If the dataset is not already chunked,
+then <code style="background-color:whitesmoke;">h5repack</code> will apply chunking to it. Both chunking
+and compression cannot be applied to a dataset at the same time with <code style="background-color:whitesmoke;">h5repack</code>.
+
+In the following example,
+\li <em>h5dump</em> lists the properties for the objects in <em>dset.h5</em>. Note that the dataset <em>dset</em> is contiguous.
+\li <em>h5repack</em> writes dset.h5 into a new file <em>dsetrpk.h5</em>, applying GZIP Level 5 compression to the dataset <em>/dset</em> in dsetrpk.h5.
+\li <em>h5dump</em> lists the properties for the new <em>dsetrpk.h5</em> file. Note that <em>/dset</em> is both compressed and chunked.
+
+<em>Example</em>
+\code
+$ h5dump -pH dset.h5
+HDF5 "dset.h5" {
+GROUP "/" {
+ DATASET "dset" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 12, 18 ) / ( 12, 18 ) }
+ STORAGE_LAYOUT {
+ CONTIGUOUS
+ SIZE 864
+ OFFSET 1400
+ }
+ FILTERS {
+ NONE
+ }
+ FILLVALUE {
+ FILL_TIME H5D_FILL_TIME_IFSET
+ VALUE 0
+ }
+ ALLOCATION_TIME {
+ H5D_ALLOC_TIME_LATE
+ }
+ }
+}
+}
+
+$ h5repack -f dset:GZIP=5 dset.h5 dsetrpk.h5
+
+$ h5dump -pH dsetrpk.h5
+HDF5 "dsetrpk.h5" {
+GROUP "/" {
+ DATASET "dset" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 12, 18 ) / ( 12, 18 ) }
+ STORAGE_LAYOUT {
+ CHUNKED ( 12, 18 )
+ SIZE 160 (5.400:1 COMPRESSION)
+ }
+ FILTERS {
+ COMPRESSION DEFLATE { LEVEL 5 }
+ }
+ FILLVALUE {
+ FILL_TIME H5D_FILL_TIME_IFSET
+ VALUE 0
+ }
+ ALLOCATION_TIME {
+ H5D_ALLOC_TIME_INCR
+ }
+ }
+}
+}
+\endcode
+
+\section secViewToolsEditCopy Copy Objects to Another File
+The <code style="background-color:whitesmoke;">h5copy</code> utility can be used to copy an object or
+objects from one HDF5 file to another or to a different location in the same file. It uses the
+#H5Ocopy and #H5Lcopy APIs in HDF5.
+
+Following are some of the options that can be used with <code style="background-color:whitesmoke;">h5copy</code>.
+<table>
+<tr>
+<th>h5copy Options</th><th>Description</th>
+</tr>
+<tr>
+<td>-i, --input
+</td>
+<td>Input file name
+</td>
+</tr>
+<tr>
+<td>-o, --output
+</td>
+<td>Output file name
+</td>
+</tr>
+<tr>
+<td>-s, --source
+</td>
+<td>Source object name
+</td>
+</tr>
+<tr>
+<td>-d, --destination
+</td>
+<td>Destination object name
+</td>
+</tr>
+<tr>
+<td>-p, --parents
+</td>
+<td>Make parent groups as needed
+</td>
+</tr>
+<tr>
+<td>-v, --verbose
+</td>
+<td>Verbose mode
+</td>
+</tr>
+<tr>
+<td>-f, --flag
+</td>
+<td>Flag type
+</td>
+</tr>
+</table>
+
+For a complete list of options and information on using <code style="background-color:whitesmoke;">h5copy</code>, type:
+\code
+h5copy --help
+\endcode
+
+In the example below, the dataset <code style="background-color:whitesmoke;">/MyGroup/Group_A/dset2</code>
+in <code style="background-color:whitesmoke;">groups.h5</code> gets copied to the root
+("<code style="background-color:whitesmoke;">/</code>") group of a new file,
+<code style="background-color:whitesmoke;">newgroup.h5</code>, with the name
+<code style="background-color:whitesmoke;">dset3</code>:
+\code
+$h5dump -H groups.h5
+HDF5 "groups.h5" {
+GROUP "/" {
+ GROUP "MyGroup" {
+ GROUP "Group_A" {
+ DATASET "dset2" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 2, 10 ) / ( 2, 10 ) }
+ }
+ }
+ GROUP "Group_B" {
+ }
+ DATASET "dset1" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 3, 3 ) / ( 3, 3 ) }
+ }
+ }
+}
+}
+
+$ h5copy -i groups.h5 -o newgroup.h5 -s /MyGroup/Group_A/dset2 -d /dset3
+
+$ h5dump -H newgroup.h5
+HDF5 "newgroup.h5" {
+GROUP "/" {
+ DATASET "dset3" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 2, 10 ) / ( 2, 10 ) }
+ }
+}
+}
+\endcode
+
+There are also <code style="background-color:whitesmoke;">h5copy</code> flags that can be specified
+with the <code style="background-color:whitesmoke;">-f</code> option. In the example below, the
+<code style="background-color:whitesmoke;">-f shallow</code> option specifies to copy only the
+immediate members of the group <code style="background-color:whitesmoke;">/MyGroup</code> from
+the <code style="background-color:whitesmoke;">groups.h5</code> file mentioned above to a new
+file <code style="background-color:whitesmoke;">mygrouponly.h5</code>:
+\code
+h5copy -v -i groups.h5 -o mygrouponly.h5 -s /MyGroup -d /MyGroup -f shallow
+\endcode
+
+The output of the above command is shown below. The verbose option <code style="background-color:whitesmoke;">-v</code>
+describes the action that was taken, as shown in the output below.
+\code
+Copying file <groups.h5> and object </MyGroup> to file <mygrouponly.h5> and object </MyGroup>
+Using shallow flag
+
+$ h5dump -H mygrouponly.h5
+HDF5 "mygrouponly.h5" {
+GROUP "/" {
+ GROUP "MyGroup" {
+ GROUP "Group_A" {
+ }
+ GROUP "Group_B" {
+ }
+ DATASET "dset1" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 3, 3 ) / ( 3, 3 ) }
+ }
+ }
+}
+}
+\endcode
+
+\section secViewToolsEditAdd Add or Remove User Block from File
+The user block is a space in an HDF5 file that is not interpreted by the HDF5 library. It is a property
+list that can be added when creating a file. See the #H5Pset_userblock API in the \ref RM for more
+information regarding this property.
+
+Once created in a file, the user block cannot be removed. However, you can use the
+<code style="background-color:whitesmoke;">h5jam</code> and <code style="background-color:whitesmoke;">h5unjam</code>
+utilities to add or remove a user block from a file into a new file.
+
+These two utilities work similarly, except that <code style="background-color:whitesmoke;">h5jam</code>
+adds a user block to a file and <code style="background-color:whitesmoke;">h5unjam</code> removes the user
+block. You can also overwrite or delete a user block in a file.
+
+Specify the <code style="background-color:whitesmoke;">-h</code> option to see a complete list of options
+that can be used with <code style="background-color:whitesmoke;">h5jam</code> and
+<code style="background-color:whitesmoke;">h5unjam</code>. For example:
+\code
+ h5jam -h
+ h5unjam -h
+\endcode
+
+Below are the basic options for adding or removing a user block with <code style="background-color:whitesmoke;">h5jam</code>
+and <code style="background-color:whitesmoke;">h5unjam</code>:
+
+<table>
+<tr>
+<th>h5jam/h5unjam Options</th><th>Description</th>
+</tr>
+<tr>
+<td>-i
+</td>
+<td>Input File
+</td>
+</tr>
+<tr>
+<td>-o
+</td>
+<td>Output File
+</td>
+</tr>
+<tr>
+<td>-u
+</td>
+<td>File to add or remove from user block
+</td>
+</tr>
+</table>
+
+Let's say you wanted to add the program that creates an HDF5 file to its user block. As an example, you
+can take the <code style="background-color:whitesmoke;">h5_crtgrpar.c</code> program from the
+\ref LBExamples
+and add it to the file it creates, <code style="background-color:whitesmoke;">groups.h5</code>. This can
+be done with <code style="background-color:whitesmoke;">h5jam</code>, as follows:
+\code
+h5jam -i groups.h5 -u h5_crtgrpar.c -o groupsub.h5
+\endcode
+
+You can view the file with the <code style="background-color:whitesmoke;">more</code> command
+(<code style="background-color:whitesmoke;">more groupsub.h5</code>)
+to see that the <code style="background-color:whitesmoke;">h5_crtgrpar.c</code> file is indeed included.
+
+To remove the user block that was just added, type:
+\code
+h5unjam -i groupsub.h5 -u h5_crtgrparNEW.c -o groups-noub.h5
+\endcode
+
+This writes the user block in the file <code style="background-color:whitesmoke;">groupsub.h5</code>
+into <code style="background-color:whitesmoke;">h5_crtgrparNEW.c</code>. The new HDF5 file,
+<code style="background-color:whitesmoke;">groups-noub.h5</code>, will not contain a user block.
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref ViewToolsCommand
+
+*/
+
+/** @page ViewToolsConvert Command-line Tools For Converting HDF5 Files
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref ViewToolsCommand
+<hr>
+
+\section secViewToolsConvertTOC Contents
+<ul>
+<li>\ref secViewToolsConvertASCII</li>
+<li>\ref secViewToolsConvertBinary</li>
+<li>\ref secViewToolsConvertExport</li>
+</ul>
+
+\section secViewToolsConvertASCII Output HDF5 Dataset into an ASCII File (to Import into Excel and Other Applications)
+The <code style="background-color:whitesmoke;">h5dump</code> utility can be used to convert an HDF5 dataset
+into an ASCII file, which can then be imported into Excel and other applications. The following options are used:
+<table>
+<tr>
+<th>Options</th><th>Description</th>
+</tr>
+<tr>
+<td> -d D, --dataset=D
+</td>
+<td>Display dataset D
+</td>
+</tr>
+<tr>
+<td> -o F, --output=F
+</td>
+<td>Output raw data into file F
+</td>
+</tr>
+<tr>
+<td> -y, --noindex
+</td>
+<td>Suppress printing of array indices with the data
+</td>
+</tr>
+<tr>
+<td> -w N, --width=N
+</td>
+<td>Set N number of columns of output. A value of 0
+sets the number to 65535 (the maximum)
+</td>
+</tr>
+</table>
+
+As an example, <code style="background-color:whitesmoke;">h5_crtdat.c</code> from the \ref LBDsetCreate
+HDF5 Tutorial topic, creates the file <code style="background-color:whitesmoke;">dset.h5</code> with
+a dataset <code style="background-color:whitesmoke;">/dset</code> that is a 4 x 6 integer array. The
+following is displayed when viewing <code style="background-color:whitesmoke;">dset.h5</code> with
+<code style="background-color:whitesmoke;">h5dump</code>:
+\code
+$ h5dump dset.h5
+HDF5 "dset.h5" {
+GROUP "/" {
+ DATASET "dset" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
+ DATA {
+ (0,0): 1, 2, 3, 4, 5, 6,
+ (1,0): 7, 8, 9, 10, 11, 12,
+ (2,0): 13, 14, 15, 16, 17, 18,
+ (3,0): 19, 20, 21, 22, 23, 24
+ }
+ }
+}
+}
+\endcode
+
+The following command will output the values of the <code style="background-color:whitesmoke;">/dset</code>
+dataset to the ASCII file <code style="background-color:whitesmoke;">dset.asci</code>:
+\code
+h5dump -d /dset -o dset.asci -y -w 50 dset.h5
+\endcode
+
+In particular, note that:
+\li The default behavior of <code style="background-color:whitesmoke;">h5dump</code> is to print indices,
+and the <code style="background-color:whitesmoke;">-y</code> option suppresses this.
+\li The <code style="background-color:whitesmoke;">-w 50</code> option tells
+<code style="background-color:whitesmoke;">h5dump</code> to allow 50 columns for outputting the data. The
+value specified must be large enough to accommodate the dimension size of the dataset multiplied by the
+number of positions and spaces needed to print each value. If the value is not large enough, the output
+will wrap to the next line, and the data will not display as expected in Excel or other applications. To
+ensure that the output does not wrap to the next line, you can also specify 0 (zero) for the
+<code style="background-color:whitesmoke;">-w</code> option.
+
+In addition to creating the ASCII file <code style="background-color:whitesmoke;">dset.asci</code>, the
+above command outputs the metadata of the specified dataset:
+\code
+HDF5 "dset.h5" {
+DATASET "/dset" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
+ DATA {
+ }
+}
+}
+\endcode
+
+The <code style="background-color:whitesmoke;">dset.asci</code> file will contain the values for the dataset:
+\code
+ 1, 2, 3, 4, 5, 6,
+ 7, 8, 9, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18,
+ 19, 20, 21, 22, 23, 24
+\endcode
+
+\section secViewToolsConvertBinary Output HDF5 Dataset into Binary File
+The <code style="background-color:whitesmoke;">h5dump</code> utility can be used to convert an
+HDF5 dataset to a binary file with the following options:
+<table>
+<tr>
+<th>Options</th><th>Description</th>
+</tr>
+<tr>
+<td>-d D, --dataset=D
+</td>
+<td>Display dataset D
+</td>
+</tr>
+<tr>
+<td>-o F, --output=F
+</td>
+<td>Output raw data into file F
+</td>
+</tr>
+<tr>
+<td>-b B, --binary=B
+</td>
+<td>Binary file output of form B.
+Valid values are: LE, BE, NATIVE, FILE
+</td>
+</tr>
+</table>
+
+As an example, <code style="background-color:whitesmoke;">h5_crtdat.c</code> from the
+\ref LBDsetCreate HDF5 Tutorial topic, creates the file dset.h5 with a dataset
+<code style="background-color:whitesmoke;">/dset</code> that is a 4 x 6 integer array. The
+following is displayed when viewing <code style="background-color:whitesmoke;">dset.h5</code>
+with <code style="background-color:whitesmoke;">h5dump</code>:
+\code
+$ h5dump -d /dset/ dset.h5
+HDF5 "dset.h5" {
+DATASET "/dset/" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
+ DATA {
+ (0,0): 1, 2, 3, 4, 5, 6,
+ (1,0): 7, 8, 9, 10, 11, 12,
+ (2,0): 13, 14, 15, 16, 17, 18,
+ (3,0): 19, 20, 21, 22, 23, 24
+ }
+}
+}
+\endcode
+
+As specified by the <code style="background-color:whitesmoke;">-d</code> and
+<code style="background-color:whitesmoke;">-o</code> options, the following
+<code style="background-color:whitesmoke;">h5dump</code> command will output the values of the dataset
+<code style="background-color:whitesmoke;">/dset</code> to a file called
+<code style="background-color:whitesmoke;">dset.bin</code>. The <code style="background-color:whitesmoke;">-b</code>
+option specifies that the output will be binary in Little Endian format (LE).
+
+\code
+h5dump -d /dset -b LE -o dset.bin dset.h5
+\endcode
+
+This command outputs the metadata for the dataset, as well as creating the binary file
+<code style="background-color:whitesmoke;">dset.bin</code>:
+\code
+HDF5 "dset.h5" {
+DATASET "/dset" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
+ DATA {
+ }
+}
+}
+\endcode
+
+If you look at the resulting <code style="background-color:whitesmoke;">dset.bin</code> file with
+a binary editor, you will see that it contains the dataset's values. For example (on Linux) you will see:
+\code
+$ od -t d dset.bin
+0000000 1 2 3 4
+0000020 5 6 7 8
+0000040 9 10 11 12
+0000060 13 14 15 16
+0000100 17 18 19 20
+0000120 21 22 23 24
+0000140
+\endcode
+
+\section secViewToolsConvertExport Export from h5dump and Import into HDF5
+The <code style="background-color:whitesmoke;">h5import</code> utility can use the output of
+<code style="background-color:whitesmoke;">h5dump</code> as input to create a dataset or file.
+
+The <code style="background-color:whitesmoke;">h5dump</code> utility must first create two files:
+\li A DDL file, which will be used as an <code style="background-color:whitesmoke;">h5import</code> configuration file
+\li A raw data file containing the data to be imported
+
+The DDL file must be generated with the <code style="background-color:whitesmoke;">h5dump -p</code> option, to generate properties.
+
+The raw data file that can be imported into HDF5 using this method may contain either numeric or string data with the following restrictions:
+\li Numeric data requires the use of the <code style="background-color:whitesmoke;">h5dump -b</code> option to produce a binary data file.
+\li String data must be written with the <code style="background-color:whitesmoke;">h5dump -y</code> and
+<code style="background-color:whitesmoke;">--width=1</code> options, generating a single column of strings without indices.
+
+Two examples follow: the first imports a dataset with a numeric datatype. Note that numeric data requires
+the use of the <code style="background-color:whitesmoke;">h5dump -b</code> option to produce a binary data
+file. The example program (<code style="background-color:whitesmoke;">h5_crtdat.c</code>) that creates this
+file is included with the \ref IntroHDF5 tutorial and can be obtained from the \ref LBExamples page:
+\code
+h5dump -p -d "/dset" --ddl=dsetbin.dmp -o dset.bin -b dset.h5
+h5import dset.bin -c dsetbin.dmp -o new-dset.h5
+\endcode
+
+The output before and after running these commands is shown below:
+\code
+$ h5dump dset.h5
+HDF5 "dset.h5" {
+GROUP "/" {
+ DATASET "dset" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
+ DATA {
+ (0,0): 1, 2, 3, 4, 5, 6,
+ (1,0): 7, 8, 9, 10, 11, 12,
+ (2,0): 13, 14, 15, 16, 17, 18,
+ (3,0): 19, 20, 21, 22, 23, 24
+ }
+ }
+}
+}
+$ h5dump -p -d "/dset" --ddl=dsetbin.dmp -o dset.bin -b dset.h5
+
+$ h5import dset.bin -c dsetbin.dmp -o new-dset.h5
+
+$ h5dump new-dset.h5
+HDF5 "new-dset.h5" {
+GROUP "/" {
+ DATASET "dset" {
+ DATATYPE H5T_STD_I32BE
+ DATASPACE SIMPLE { ( 4, 6 ) / ( 4, 6 ) }
+ DATA {
+ (0,0): 1, 2, 3, 4, 5, 6,
+ (1,0): 7, 8, 9, 10, 11, 12,
+ (2,0): 13, 14, 15, 16, 17, 18,
+ (3,0): 19, 20, 21, 22, 23, 24
+ }
+ }
+}
+}
+\endcode
+
+The second example imports string data. The example program that creates this file can be downloaded
+from the <a href="https://portal.hdfgroup.org/display/HDF5/Examples+by+API">Examples by API</a> page.
+
+Note that string data requires use of the <code style="background-color:whitesmoke;">h5dump -y</code>
+option to exclude indexes and the <code style="background-color:whitesmoke;">h5dump --width=1</code>
+option to generate a single column of strings. The <code style="background-color:whitesmoke;">-o</code>
+option outputs the data into an ASCII file.
+\code
+h5dump -p -d "/DS1" -O vlstring.dmp -o vlstring.ascii -y --width=1 h5ex_t_vlstring.h5
+h5import vlstring.ascii -c vlstring.dmp -o new-vlstring.h5
+\endcode
+
+The output before and after running these commands is shown below:
+\code
+$ h5dump h5ex_t_vlstring.h5
+HDF5 "h5ex_t_vlstring.h5" {
+GROUP "/" {
+ DATASET "DS1" {
+ DATATYPE H5T_STRING {
+ STRSIZE H5T_VARIABLE;
+ STRPAD H5T_STR_SPACEPAD;
+ CSET H5T_CSET_ASCII;
+ CTYPE H5T_C_S1;
+ }
+ DATASPACE SIMPLE { ( 4 ) / ( 4 ) }
+ DATA {
+ (0): "Parting", "is such", "sweet", "sorrow."
+ }
+ }
+}
+}
+
+$ h5dump -p -d "/DS1" -O vlstring.dmp -o vlstring.ascii -y --width=1 h5ex_t_vlstring.h5
+
+$ h5import vlstring.ascii -c vlstring.dmp -o new-vlstring.h5
+
+$ h5dump new-vlstring.h5
+HDF5 "new-vlstring.h5" {
+GROUP "/" {
+ DATASET "DS1" {
+ DATATYPE H5T_STRING {
+ STRSIZE H5T_VARIABLE;
+ STRPAD H5T_STR_NULLTERM;
+ CSET H5T_CSET_ASCII;
+ CTYPE H5T_C_S1;
+ }
+ DATASPACE SIMPLE { ( 4 ) / ( 4 ) }
+ DATA {
+ (0): "Parting", "is such", "sweet", "sorrow."
+ }
+ }
+}
+\endcode
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref ViewToolsCommand
+
+*/
diff --git a/doxygen/dox/ViewToolsJPSS.dox b/doxygen/dox/ViewToolsJPSS.dox
new file mode 100644
index 0000000..9c15395
--- /dev/null
+++ b/doxygen/dox/ViewToolsJPSS.dox
@@ -0,0 +1,763 @@
+/** @page ViewToolsJPSS Use Case: Examining a JPSS NPP File With HDF5 Tools
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref ViewToolsCommand
+<hr>
+
+\section secViewToolsJPSSTOC Contents
+<ul>
+<li>\ref secViewToolsJPSSDeter</li>
+<li>\ref secViewToolsJPSSView</li>
+<li>\ref secViewToolsJPSSExam</li>
+</ul>
+
+This tutorial illustrates how to use the HDF5 tools to examine NPP files from the JPSS project. The following files are discussed:
+\code
+SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5 (<a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5.gz">gzipped file</a>)
+SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5 (<a href="https://support.hdfgroup.org/ftp/HDF5/examples/files/tutorial/SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5.gz">gzipped file</a>)
+\endcode
+
+\section secViewToolsJPSSDeter Determining File Contents
+The first thing you may want to do is determine what is in your file. You can use the command-line tools or HDFView to do this:
+\li @ref subsecViewToolsJPSSDeter_h5dump
+\li @ref subsecViewToolsJPSSDeter_h5ls
+\li @ref subsecViewToolsJPSSDeter_HDFView
+
+JPSS NPP files all contain two root level groups:
+<table>
+<tr>
+<th>Group</th><th>Description</th>
+</tr>
+<tr>
+<td>/All_Data
+</td>
+<td>Contains the raw data and optional geo-location information.
+</td>
+</tr>
+<tr>
+<td>/Data_Products
+</td>
+<td>Contains a dataset ending in <code style="background-color:whitesmoke;">Aggr</code> with
+references to objects in the <code style="background-color:whitesmoke;">/All_Data</code> group.
+Contains granules (datasets with a name ending in <code style="background-color:whitesmoke;">Gran_#</code>)
+with references to selected regions in datasets under <code style="background-color:whitesmoke;">/All_Data</code>.
+</td>
+</tr>
+</table>
+
+\subsection subsecViewToolsJPSSDeter_h5dump h5dump
+With <code style="background-color:whitesmoke;">h5dump</code> you can see a list of the objects
+in the file using the <code style="background-color:whitesmoke;">-n</code> option:
+\code
+h5dump -n <file>
+\endcode
+
+For example:
+\code
+$ h5dump -n SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5
+HDF5 "SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5" {
+FILE_CONTENTS {
+ group /
+ group /All_Data
+ group /All_Data/VIIRS-M9-SDR_All
+ dataset /All_Data/VIIRS-M9-SDR_All/ModeGran
+ dataset /All_Data/VIIRS-M9-SDR_All/ModeScan
+ dataset /All_Data/VIIRS-M9-SDR_All/NumberOfBadChecksums
+ dataset /All_Data/VIIRS-M9-SDR_All/NumberOfDiscardedPkts
+ dataset /All_Data/VIIRS-M9-SDR_All/NumberOfMissingPkts
+ dataset /All_Data/VIIRS-M9-SDR_All/NumberOfScans
+ dataset /All_Data/VIIRS-M9-SDR_All/PadByte1
+ dataset /All_Data/VIIRS-M9-SDR_All/QF1_VIIRSMBANDSDR
+ dataset /All_Data/VIIRS-M9-SDR_All/QF2_SCAN_SDR
+ dataset /All_Data/VIIRS-M9-SDR_All/QF3_SCAN_RDR
+ dataset /All_Data/VIIRS-M9-SDR_All/QF4_SCAN_SDR
+ dataset /All_Data/VIIRS-M9-SDR_All/QF5_GRAN_BADDETECTOR
+ dataset /All_Data/VIIRS-M9-SDR_All/Radiance
+ dataset /All_Data/VIIRS-M9-SDR_All/RadianceFactors
+ dataset /All_Data/VIIRS-M9-SDR_All/Reflectance
+ dataset /All_Data/VIIRS-M9-SDR_All/ReflectanceFactors
+ group /Data_Products
+ group /Data_Products/VIIRS-M9-SDR
+ dataset /Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Aggr
+ dataset /Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_0
+ dataset /Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_1
+ dataset /Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_2
+ dataset /Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_3
+ }
+}
+\endcode
+
+In the output above you can see that there are four granules (ending in
+<code style="background-color:whitesmoke;">Gran_#</code>) in the
+<code style="background-color:whitesmoke;">/Data_Products/VIIRS-M9-SDR/</code> group.
+
+\subsection subsecViewToolsJPSSDeter_h5ls h5ls
+With <code style="background-color:whitesmoke;">h5ls</code> you can see a list of the objects in the
+file using the <code style="background-color:whitesmoke;">-lr</code>
+options. The <code style="background-color:whitesmoke;">h5ls</code> utility also shows shape and size
+(dataspace) information about datasets.
+\code
+h5ls -lr <file>
+\endcode
+
+For example:
+\code
+$ h5ls -lr SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5
+/ Group
+/All_Data Group
+/All_Data/VIIRS-M9-SDR_All Group
+/All_Data/VIIRS-M9-SDR_All/ModeGran Dataset {4/Inf}
+/All_Data/VIIRS-M9-SDR_All/ModeScan Dataset {192/Inf}
+/All_Data/VIIRS-M9-SDR_All/NumberOfBadChecksums Dataset {192/Inf}
+/All_Data/VIIRS-M9-SDR_All/NumberOfDiscardedPkts Dataset {192/Inf}
+/All_Data/VIIRS-M9-SDR_All/NumberOfMissingPkts Dataset {192/Inf}
+/All_Data/VIIRS-M9-SDR_All/NumberOfScans Dataset {4/Inf}
+/All_Data/VIIRS-M9-SDR_All/PadByte1 Dataset {12/Inf}
+/All_Data/VIIRS-M9-SDR_All/QF1_VIIRSMBANDSDR Dataset {3072/Inf, 3200/Inf}
+/All_Data/VIIRS-M9-SDR_All/QF2_SCAN_SDR Dataset {192/Inf}
+/All_Data/VIIRS-M9-SDR_All/QF3_SCAN_RDR Dataset {192/Inf}
+/All_Data/VIIRS-M9-SDR_All/QF4_SCAN_SDR Dataset {3072/Inf}
+/All_Data/VIIRS-M9-SDR_All/QF5_GRAN_BADDETECTOR Dataset {64/Inf}
+/All_Data/VIIRS-M9-SDR_All/Radiance Dataset {3072/Inf, 3200/Inf}
+/All_Data/VIIRS-M9-SDR_All/RadianceFactors Dataset {8/Inf}
+/All_Data/VIIRS-M9-SDR_All/Reflectance Dataset {3072/Inf, 3200/Inf}
+/All_Data/VIIRS-M9-SDR_All/ReflectanceFactors Dataset {8/Inf}
+/Data_Products Group
+/Data_Products/VIIRS-M9-SDR Group
+/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Aggr Dataset {16/Inf}
+/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_0 Dataset {16/Inf}
+/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_1 Dataset {16/Inf}
+/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_2 Dataset {16/Inf}
+/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_3 Dataset {16/Inf}
+\endcode
+Note that the <code style="background-color:whitesmoke;">Inf</code> indicates that those datasets are appendable or unlimited in size.
+
+\subsection subsecViewToolsJPSSDeter_HDFView HDFView
+If you open the file in HDFView, it will display the file and the root level groups within
+it in the TreeView on the left. An HDF5 file is a folder with a "5" in the middle, followed
+by the file name. There are two folders (groups) within the JPSS file
+(<code style="background-color:whitesmoke;">All_Data/</code> and <code style="background-color:whitesmoke;">Data_Products/</code>),
+which you can select to see their contents:
+<table>
+<tr>
+<td>
+\image html hdfview-tree.png
+</td>
+</tr>
+</table>
+
+If you click twice with the left-mouse button on a folder or group in the TreeView, the contents
+of the folder will be listed. If you click twice on an object such as a dataset, a window with
+the object's values will be displayed.
+
+Underneath the <code style="background-color:whitesmoke;">VIIRS-M1-SDR</code> folder are what HDF5
+calls datasets. The scarlet letter <code style="background-color:whitesmoke;">"A"</code> attached
+to the group and datasets under <code style="background-color:whitesmoke;">Data_Products/</code>
+indicates that there are attributes associated with them.
+
+\section secViewToolsJPSSView Viewing the User Block
+All JPSS files contain a user block in XML with information about the file. The user block is an
+optional space allocated at the beginning of an HDF5 file that is not interpreted by the HDF5
+library. Its size is a multiple of 512.
+
+Since the user block in JPSS files is stored in ASCII and it is stored at the beginning of an
+HDF5 file, you could use a text editor or viewer to examine it. However, there are HDF5 utilities
+that can help with this:
+<table>
+<tr>
+<th>Utility</th><th>Description</th>
+</tr>
+<tr>
+<td>h5unjam
+</td>
+<td>Extracts a user block from an HDF5 file
+</td>
+</tr>
+<tr>
+<td>h5dump
+</td>
+<td>The -B (--superblock) option displays the size of the user block in an HDF5 file
+</td>
+</tr>
+</table>
+
+\subsection subsecViewToolsJPSSView_h5unjam h5unjam
+The \ref secViewToolsEditAdd tutorial topic discusses the use of the
+<code style="background-color:whitesmoke;">h5jam</code> and <code style="background-color:whitesmoke;">h5unjam</code>
+utilities for adding or removing a user block from a file. An input HDF5 file
+(<code style="background-color:whitesmoke;">-i</code>), output HDF5 file
+(<code style="background-color:whitesmoke;">-o</code>), and user block text file
+(<code style="background-color:whitesmoke;">-u</code>) can be specified with these tools. You can use the
+<code style="background-color:whitesmoke;">h5unjam</code> tool to extract and view the user block in a JPSS file:
+\code
+h5unjam -i <Input HDF5 File> -o <Output HDF5 File> -u <User Block File>
+\endcode
+
+For example this command will extract the user block into the file UB.xml:
+\code
+$ h5unjam -i SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5
+ -o svm09-noUB.h5 -u UB.xml
+\endcode
+
+The input HDF5 file remains unchanged. The output HDF5 file will not contain the user block.
+The <code style="background-color:whitesmoke;">UB.xml</code> file contains the user block
+which can be viewed with a browser.
+
+\subsection subsecViewToolsJPSSView_h5dump h5dump
+The h5dump utility has the <code style="background-color:whitesmoke;">-B (--superblock)</code> option for displaying the superblock in an HDF5 file.
+The superblock contains information about the file such as the file signature, file consistency flags,
+the number of bytes to store addresses and size of an object, as well as the size of the user block:
+\code
+h5dump -B (--superblock)
+\endcode
+
+Below is an example (Unix):
+\code
+$ h5dump -B -H SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5 | more
+HDF5 "SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5" {
+SUPER_BLOCK {
+ SUPERBLOCK_VERSION 0
+ FREELIST_VERSION 0
+ SYMBOLTABLE_VERSION 0
+ OBJECTHEADER_VERSION 0
+ OFFSET_SIZE 8
+ LENGTH_SIZE 8
+ BTREE_RANK 16
+ BTREE_LEAF 4
+ ISTORE_K 32
+ USER_BLOCK {
+ USERBLOCK_SIZE 1024
+ }
+}
+\endcode
+
+Once you have the size of the user block, you can extract it from the file using system commands.
+For example, on Unix platforms you can use the head command-line tool:
+\code
+head -c <USERBLOCK_SIZE> <JPSS File> >& USERBLOCK.xml
+\endcode
+
+There are Unix tools for Windows that may work, such as <a href="http://gnuwin32.sourceforge.net/packages/coreutils.htm">CoreUtils for Windows</a>.
+
+\section secViewToolsJPSSExam Examining a Granule
+<ul>
+<li>@ref subsecViewToolsJPSSExam_h5dump<br />
+<ul>
+<li>@ref subsubsecViewToolsJPSSExam_h5dumpRegRef</li>
+<li>@ref subsubsecViewToolsJPSSExam_h5dumpQuality</li>
+<li>@ref subsubsecViewToolsJPSSExam_h5dumpProps</li>
+</ul></li>
+<li>@ref subsecViewToolsJPSSExamr_HDFView</li>
+</ul>
+
+\subsection subsecViewToolsJPSSExam_h5dump h5dump
+There are several options that you may first want to use when examining a granule with h5dump:
+<table>
+<tr>
+<th>Option</th><th>Description</th>
+</tr>
+<tr>
+<td>-H, --header
+</td>
+<td>Prints header (metadata) information only
+</td>
+</tr>
+<tr>
+<td>-d D, --dataset=D
+</td>
+<td>Specifies the granule dataset
+</td>
+</tr>
+<tr>
+<td>-A 0, --onlyattr=0
+</td>
+<td>Suppresses attributes
+</td>
+</tr>
+<tr>
+<td>-p, --properties
+</td>
+<td>Show properties of datasets
+(See Properties)
+</td>
+</tr>
+</table>
+
+You would specify the dataset (<code style="background-color:whitesmoke;">-d D</code>) and the
+<code style="background-color:whitesmoke;">-H</code> options to view the metadata associated with
+a specific granule. There are many attributes associated with a granule and
+<code style="background-color:whitesmoke;">-A 0</code> can be used to suppress those.
+
+For example:
+\code
+h5dump -H -A 0 -d "/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_0"
+ SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5
+\endcode
+
+This command displays:
+\code
+ HDF5 "SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5" {
+ DATASET "/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_0" {
+ DATATYPE H5T_REFERENCE { H5T_STD_REF_DSETREG }
+ DATASPACE SIMPLE { ( 16 ) / ( H5S_UNLIMITED ) }
+ }
+ }
+\endcode
+
+To see the actual contents of the granule remove the <code style="background-color:whitesmoke;">-H</code> option:
+\code
+h5dump -A 0 -d "/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_0"
+ SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5
+\endcode
+
+The above command displays:
+\code
+HDF5 "SVM09_npp_d20120229_t0849107_e0854511_b01759_c20120229145452682127_noaa_ops.h5" {
+DATASET "/Data_Products/VIIRS-M9-SDR/VIIRS-M9-SDR_Gran_0" {
+ DATATYPE H5T_REFERENCE { H5T_STD_REF_DSETREG }
+ DATASPACE SIMPLE { ( 16 ) / ( H5S_UNLIMITED ) }
+ DATA {
+ DATASET /All_Data/VIIRS-M9-SDR_All/Radiance {(0,0)-(767,3199)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/Reflectance {(0,0)-(767,3199)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/ModeScan {(0)-(47)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/ModeGran {(0)-(0)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/PadByte1 {(0)-(2)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/NumberOfScans {(0)-(0)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/NumberOfMissingPkts {(0)-(47)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/NumberOfBadChecksums {(0)-(47)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/NumberOfDiscardedPkts {(0)-(47)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/QF1_VIIRSMBANDSDR {(0,0)-(767,3199)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/QF2_SCAN_SDR {(0)-(47)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/QF3_SCAN_RDR {(0)-(47)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/QF4_SCAN_SDR {(0)-(767)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/QF5_GRAN_BADDETECTOR {(0)-(15)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/RadianceFactors {(0)-(1)},
+ DATASET /All_Data/VIIRS-M9-SDR_All/ReflectanceFactors {(0)-(1)}
+ }
+}
+}
+\endcode
+
+As you can see in the output above, the datatype for this dataset is:
+\code
+DATATYPE H5T_REFERENCE { H5T_STD_REF_DSETREG }
+\endcode
+
+This indicates that it is a dataset specifically for storing references to regions (or subsets)
+in other datasets. The dataset contains 16 such references, and more can be added to it, as
+indicated by the dataspace (in other words it is unlimited):
+\code
+DATASPACE SIMPLE { ( 16 ) / ( H5S_UNLIMITED ) }
+\endcode
+
+\subsubsection subsubsecViewToolsJPSSExam_h5dumpRegRef Viewing a Region Reference
+What if we wanted to look at the <code style="background-color:whitesmoke;">NumberOfScans</code> data for a specific granule in a file?
+
+First, we may be interested in determining whether the scans were done at night or in the day. If a scan was at night, there will be no data.
+
+The attribute <code style="background-color:whitesmoke;">N_Day_Night_Flag</code> is used to determine when the scan was done. If you don't know where this attribute is located, you can use the <code style="background-color:whitesmoke;">-N</code> option to search for it in the file. If you were to run this command on the <code style="background-color:whitesmoke;">SVM09</code> file used above, you would see that the <code style="background-color:whitesmoke;">N_Day_Night_Flag</code> attribute has a value of <code style="background-color:whitesmoke;">Night</code> for the four granules in the file. Indeed, if you actually examine the <code style="background-color:whitesmoke;">NumberOfScans</code> data, you will see that only fill values are written.
+
+For that reason we will examine the <code style="background-color:whitesmoke;">NumberOfScans</code> data for the <code style="background-color:whitesmoke;">SVM01</code> file below, as it was obtained during the day:
+\code
+h5dump -N N_Day_Night_Flag SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
+\endcode
+
+It displays:
+\code
+HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
+ATTRIBUTE "N_Day_Night_Flag" {
+ DATATYPE H5T_STRING {
+ STRSIZE 4;
+ STRPAD H5T_STR_NULLTERM;
+ CSET H5T_CSET_ASCII;
+ CTYPE H5T_C_S1;
+ }
+ DATASPACE SIMPLE { ( 1, 1 ) / ( 1, 1 ) }
+ DATA {
+ (0,0): "Day"
+ }
+}
+}
+\endcode
+
+There is just one granule in this <code style="background-color:whitesmoke;">SVM01</code> file, as shown below:
+\code
+$ h5dump -n SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
+HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
+FILE_CONTENTS {
+ group /
+ group /All_Data
+ group /All_Data/VIIRS-M1-SDR_All
+ dataset /All_Data/VIIRS-M1-SDR_All/ModeGran
+ dataset /All_Data/VIIRS-M1-SDR_All/ModeScan
+ dataset /All_Data/VIIRS-M1-SDR_All/NumberOfBadChecksums
+ dataset /All_Data/VIIRS-M1-SDR_All/NumberOfDiscardedPkts
+ dataset /All_Data/VIIRS-M1-SDR_All/NumberOfMissingPkts
+ dataset /All_Data/VIIRS-M1-SDR_All/NumberOfScans
+ dataset /All_Data/VIIRS-M1-SDR_All/PadByte1
+ dataset /All_Data/VIIRS-M1-SDR_All/QF1_VIIRSMBANDSDR
+ dataset /All_Data/VIIRS-M1-SDR_All/QF2_SCAN_SDR
+ dataset /All_Data/VIIRS-M1-SDR_All/QF3_SCAN_RDR
+ dataset /All_Data/VIIRS-M1-SDR_All/QF4_SCAN_SDR
+ dataset /All_Data/VIIRS-M1-SDR_All/QF5_GRAN_BADDETECTOR
+ dataset /All_Data/VIIRS-M1-SDR_All/Radiance
+ dataset /All_Data/VIIRS-M1-SDR_All/RadianceFactors
+ dataset /All_Data/VIIRS-M1-SDR_All/Reflectance
+ dataset /All_Data/VIIRS-M1-SDR_All/ReflectanceFactors
+ group /Data_Products
+ group /Data_Products/VIIRS-M1-SDR
+ dataset /Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Aggr
+ dataset /Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0
+ }
+}
+\endcode
+
+Now examine the references in the <code style="background-color:whitesmoke;">VIIRS-M1-SDR_Gran_0</code> granule
+\code
+$ h5dump -A 0 -d "/Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0"
+ SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
+HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
+DATASET "/Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0" {
+ DATATYPE H5T_REFERENCE { H5T_STD_REF_DSETREG }
+ DATASPACE SIMPLE { ( 16 ) / ( H5S_UNLIMITED ) }
+ DATA {
+ DATASET /All_Data/VIIRS-M1-SDR_All/Radiance {(0,0)-(767,3199)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/Reflectance {(0,0)-(767,3199)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/ModeScan {(0)-(47)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/ModeGran {(0)-(0)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/PadByte1 {(0)-(2)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/NumberOfScans {(0)-(0)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/NumberOfMissingPkts {(0)-(47)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/NumberOfBadChecksums {(0)-(47)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/NumberOfDiscardedPkts {(0)-(47)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/QF1_VIIRSMBANDSDR {(0,0)-(767,3199)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/QF2_SCAN_SDR {(0)-(47)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/QF3_SCAN_RDR {(0)-(47)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/QF4_SCAN_SDR {(0)-(767)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/QF5_GRAN_BADDETECTOR {(0)-(15)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/RadianceFactors {(0)-(1)},
+ DATASET /All_Data/VIIRS-M1-SDR_All/ReflectanceFactors {(0)-(1)}
+ }
+}
+}
+\endcode
+
+In the output above, you can see that the <code style="background-color:whitesmoke;">NumberOfScans</code>
+reference is the sixth reference in the granule counting from the top.
+
+The list of references shown above is a 0-based index to the dataset. Therefore, to specify
+<code style="background-color:whitesmoke;">NumberOfScans</code>, enter a start offset of
+<code style="background-color:whitesmoke;">5</code> for the <code style="background-color:whitesmoke;">-s</code>
+option (the sixth reference minus 1). To see the region reference data, use the <code style="background-color:whitesmoke;">-R</code> option.
+
+This command will display the data in the <code style="background-color:whitesmoke;">NumberOfScans</code> region reference:
+\code
+h5dump -A 0 -d "/Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0" -s 5 -R
+ SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
+\endcode
+
+It displays the number of scans (48):
+\code
+HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
+DATASET "/Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0" {
+ DATATYPE H5T_REFERENCE { H5T_STD_REF_DSETREG }
+ DATASPACE SIMPLE { ( 16 ) / ( H5S_UNLIMITED ) }
+ SUBSET {
+ START ( 5 );
+ STRIDE ( 1 );
+ COUNT ( 1 );
+ BLOCK ( 1 );
+ DATA {
+ (5): DATASET /All_Data/VIIRS-M1-SDR_All/NumberOfScans {
+ (5): REGION_TYPE BLOCK (0)-(0)
+ (5): DATATYPE H5T_STD_I32BE
+ (5): DATASPACE SIMPLE { ( 1 ) / ( H5S_UNLIMITED ) }
+ (5): DATA {
+ (0): 48
+ (5): }
+ (5): }
+ }
+ }
+}
+}
+\endcode
+
+The <code style="background-color:whitesmoke;">-s</code> option may be familiar as one of the options
+that was described in the \ref secViewToolsViewSub tutorial topic. The other subsetting options are not included,
+indicating that the default values are used.
+
+If you leave off the <code style="background-color:whitesmoke;">-R</code> option, you will see the subset selection, but not the data:
+\code
+$ h5dump -A 0 -d "/Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0" -s 5
+ SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
+HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
+DATASET "/Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0" {
+ DATATYPE H5T_REFERENCE { H5T_STD_REF_DSETREG }
+ DATASPACE SIMPLE { ( 16 ) / ( H5S_UNLIMITED ) }
+ SUBSET {
+ START ( 5 );
+ STRIDE ( 1 );
+ COUNT ( 1 );
+ BLOCK ( 1 );
+ DATA {
+ DATASET /All_Data/VIIRS-M1-SDR_All/NumberOfScans {(0)-(0)}
+ }
+ }
+}
+}
+\endcode
+
+\subsubsection subsubsecViewToolsJPSSExam_h5dumpQuality Viewing a Quality Flag
+The quality flags in an NPP file can be viewed with h5dump using the <code style="background-color:whitesmoke;">-M</code>
+option. Quality flags are packed into each integer value in a quality flag dataset. Quality flag datasets in NPP
+files begin with the letters <code style="background-color:whitesmoke;">QF</code>.
+
+In the following NPP file, there are five Quality Flag datasets
+(<code style="background-color:whitesmoke;">/All_Data/VIIRS-M1-SDR_All/QF*</code>):
+\code
+$ h5dump -n SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
+HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
+FILE_CONTENTS {
+ group /
+ group /All_Data
+ group /All_Data/VIIRS-M1-SDR_All
+ dataset /All_Data/VIIRS-M1-SDR_All/ModeGran
+ dataset /All_Data/VIIRS-M1-SDR_All/ModeScan
+ dataset /All_Data/VIIRS-M1-SDR_All/NumberOfBadChecksums
+ dataset /All_Data/VIIRS-M1-SDR_All/NumberOfDiscardedPkts
+ dataset /All_Data/VIIRS-M1-SDR_All/NumberOfMissingPkts
+ dataset /All_Data/VIIRS-M1-SDR_All/NumberOfScans
+ dataset /All_Data/VIIRS-M1-SDR_All/PadByte1
+ dataset /All_Data/VIIRS-M1-SDR_All/QF1_VIIRSMBANDSDR
+ dataset /All_Data/VIIRS-M1-SDR_All/QF2_SCAN_SDR
+ dataset /All_Data/VIIRS-M1-SDR_All/QF3_SCAN_RDR
+ dataset /All_Data/VIIRS-M1-SDR_All/QF4_SCAN_SDR
+ dataset /All_Data/VIIRS-M1-SDR_All/QF5_GRAN_BADDETECTOR
+ dataset /All_Data/VIIRS-M1-SDR_All/Radiance
+ dataset /All_Data/VIIRS-M1-SDR_All/RadianceFactors
+ dataset /All_Data/VIIRS-M1-SDR_All/Reflectance
+ dataset /All_Data/VIIRS-M1-SDR_All/ReflectanceFactors
+ group /Data_Products
+ group /Data_Products/VIIRS-M1-SDR
+ dataset /Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Aggr
+ dataset /Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0
+ }
+}
+\endcode
+
+The flags in this particular dataset happen to be stored in every two bits of each quality flag dataset
+element, and the values range from 0 to 2. In other words, to see the quality flag values for this
+dataset, these bits would be examined: 0 and 1, 2 and 3, 4 and 5, or 6 and 7 (This information was
+obtained from the Product Profile XML File.)
+
+For example, bits 0 and 1 in the <code style="background-color:whitesmoke;">QF1_VIIRSMBANDSDR</code> dataset specify the flag that
+"Indicates calibration quality due to bad space view offsets, OBC view offsets, etc or use of a
+previous calibration view". It has 3 values: Good (0), Poor (1), or No Calibration (2).
+
+The <code style="background-color:whitesmoke;">-M</code> option is used to specify the quality
+flag bit offset (<code style="background-color:whitesmoke;">O</code>) and length (<code style="background-color:whitesmoke;">L</code>):
+\code
+h5dump -d DATASET -M O,L FILE
+\endcode
+
+To view the first quality flag (0-1) in a 5 x 6 subset of the <code style="background-color:whitesmoke;">QF1_VIIRSMBANDSDR</code> dataset, specify:
+\code
+h5dump -d "/All_Data/VIIRS-M1-SDR_All/QF1_VIIRSMBANDSDR[0,0;;5,6;]"
+ -M 0,2 SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
+\endcode
+
+This outputs:
+\code
+HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
+DATASET "/All_Data/VIIRS-M1-SDR_All/QF1_VIIRSMBANDSDR" {
+ DATATYPE H5T_STD_U8BE
+ DATASPACE SIMPLE { ( 768, 3200 ) / ( H5S_UNLIMITED, H5S_UNLIMITED ) }
+ PACKED_BITS OFFSET=0 LENGTH=2
+ SUBSET {
+ START ( 0, 0 );
+ STRIDE ( 1, 1 );
+ COUNT ( 5, 6 );
+ BLOCK ( 1, 1 );
+ DATA {
+ (0,0): 2, 2, 2, 2, 2, 2,
+ (1,0): 2, 2, 2, 2, 2, 2,
+ (2,0): 0, 0, 0, 0, 0, 0,
+ (3,0): 0, 0, 0, 0, 0, 0,
+ (4,0): 0, 0, 0, 0, 0, 0
+ }
+ }
+}
+}
+\endcode
+
+To view more than one quality flag at a time simply add the bit offset and length values to
+<code style="background-color:whitesmoke;">-M</code>, separated by commas. For example, this
+<code style="background-color:whitesmoke;">-M</code> option specifies bits 0-1 and 2-3:
+\code
+h5dump -d DATASET -M 0,2,2,2 FILE
+\endcode
+
+\subsubsection subsubsecViewToolsJPSSExam_h5dumpProps Properties
+To view properties of a specific dataset with <code style="background-color:whitesmoke;">h5dump</code>
+use the <code style="background-color:whitesmoke;">-p</code> option along with the
+<code style="background-color:whitesmoke;">-d</code> option. Depending on the number of attributes
+and the amount of data, the <code style="background-color:whitesmoke;">-A 0</code> and
+<code style="background-color:whitesmoke;">-H</code> options can also be specified to suppress
+printing of attributes and data values:
+\code
+h5dump -p -H -A 0 -d DATASET
+\endcode
+
+The <code style="background-color:whitesmoke;">-p</code> option shows any compression filters
+associated with a dataset, as well as layout and fill value information. This option can be helpful
+in diagnosing performance and other issues.
+
+As an example, examine the <code style="background-color:whitesmoke;">/All_Data/VIIRS-M1-SDR_All/Radiance</code>
+dataset in the <code style="background-color:whitesmoke;">SVM01</code> file:
+\code
+$ h5dump -p -H -A 0 -d "/All_Data/VIIRS-M1-SDR_All/Radiance"
+ SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
+HDF5 "SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5" {
+DATASET "/All_Data/VIIRS-M1-SDR_All/Radiance" {
+ DATATYPE H5T_STD_U16BE
+ DATASPACE SIMPLE { ( 768, 3200 ) / ( H5S_UNLIMITED, H5S_UNLIMITED ) }
+ STORAGE_LAYOUT {
+ CHUNKED ( 768, 3200 )
+ SIZE 4915200
+ }
+ FILTERS {
+ NONE
+ }
+ FILLVALUE {
+ FILL_TIME H5D_FILL_TIME_IFSET
+ VALUE 65529
+ }
+ ALLOCATION_TIME {
+ H5D_ALLOC_TIME_INCR
+ }
+}
+}
+\endcode
+
+We can see that the chunk size for this dataset is 768 x 3200, and the storage size is 4915200.
+
+What if the chunk size were smaller?
+
+The dataset was modified to have a chunk size of 1 x 10, using the
+<code style="background-color:whitesmoke;">h5repack</code> utility, as shown below.
+\code
+$ h5repack -l /All_Data/VIIRS-M1-SDR_All/Radiance:CHUNK=1x10
+ SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5 SVM01repack.h5
+
+$ h5dump -p -H -A 0 -d "/All_Data/VIIRS-M1-SDR_All/Radiance" SVM01repack.h5
+HDF5 "SVM01repack.h5" {
+DATASET "/All_Data/VIIRS-M1-SDR_All/Radiance" {
+ DATATYPE H5T_STD_U16BE
+ DATASPACE SIMPLE { ( 768, 3200 ) / ( H5S_UNLIMITED, H5S_UNLIMITED ) }
+ STORAGE_LAYOUT {
+ CHUNKED ( 1, 10 )
+ SIZE 4915200
+ }
+ FILTERS {
+ NONE
+ }
+ FILLVALUE {
+ FILL_TIME H5D_FILL_TIME_IFSET
+ VALUE 65529
+ }
+ ALLOCATION_TIME {
+ H5D_ALLOC_TIME_INCR
+ }
+}
+}
+\endcode
+
+In this case, the storage size of the dataset is the same, but the size of the file almost doubled!:
+\code
+$ ls -1sh
+total 35M
+12M SVM01_npp_d20130524_t1255132_e1256374_b08146_c20130524192048864992_noaa_ops.h5
+23M SVM01repack.h5
+\endcode
+
+In general, the smaller the chunk size, the more chunks that HDF5 has to keep track of, which increases
+the size of the file and can affect performance.
+
+\subsection subsecViewToolsJPSSExamr_HDFView HDFView
+As mentioned previously, the structure of an HDF5 file is displayed in the TreeView on the left side of the HDFView screen,
+and you can click on objects and have metadata information displayed on the right side.
+
+To discover more about the granule <code style="background-color:whitesmoke;">/Data_Products/VIIRS-M1-SDR/VIIRS-M1-SDR_Gran_0</code>
+in the <code style="background-color:whitesmoke;">SVM01</code> file shown below in the TreeView, position
+the mouse over the granule and click to select. Properties for the object are displayed on the right side of the HDFView screen.
+You can see Datatype and Dataspace information on the <code style="background-color:whitesmoke;">General Object Info</code>
+tab, and any Attributes associated with the granule will be on the
+<code style="background-color:whitesmoke;">Object Attribute Info</code> tab. In the
+<code style="background-color:whitesmoke;">General Object Info</code>, you can see that the dataset is a
+Region Reference dataset, and that there are sixteen Region References in this dataset:
+<table>
+<tr>
+<td>
+\image html hdfview-prop.png
+</td>
+</tr>
+</table>
+
+To examine the data in the granule, click twice on it with the left mouse button in the TreeView,
+and it will open in a new window:
+<table>
+<tr>
+<td>
+\image html hdfview-regref.png
+</td>
+</tr>
+</table>
+
+If you click twice with the left mouse button on the sixth Region Reference
+<code style="background-color:whitesmoke;">/All_Data/VIIRS-M1-SDR_All/NumberOfScans</code> a window
+will pop up with the value(s) of the reference:
+<table>
+<tr>
+<td>
+\image html hdfview-regref2.png
+</td>
+</tr>
+</table>
+
+You can also set a user option to automatically show the value(s) in a Region Reference. Under the
+<code style="background-color:whitesmoke;">Tools</code> pull-down menu, select
+<code style="background-color:whitesmoke;">User Options</code> and then select
+<code style="background-color:whitesmoke;">HDF Settings</code> and then select
+<code style="background-color:whitesmoke;">Show RegRef Values</code> in the
+<code style="background-color:whitesmoke;">Data</code> section (see the middle of the image below):
+<table>
+<tr>
+<td>
+\image html hdfview-regrefval.png
+</td>
+</tr>
+</table>
+
+Then you will automatically see the values of the Region Reference when you open it and select an entry:
+<table>
+<tr>
+<td>
+\image html hdfview-regref1.png
+</td>
+</tr>
+</table>
+
+You can view and set quality flags by clicking the right mouse button over a quality flags dataset under
+<code style="background-color:whitesmoke;">All_Data</code> and selecting
+<code style="background-color:whitesmoke;">Open As</code> from the pop-up menu. In the middle of
+the window that pops up, you will see where you can specify <code style="background-color:whitesmoke;">Bitmask</code> options.
+<table>
+<tr>
+<td>
+\image html hdfview-qf.png
+</td>
+</tr>
+</table>
+
+<hr>
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref ViewToolsCommand
+
+*/
diff --git a/doxygen/dox/high_level/extension.dox b/doxygen/dox/high_level/extension.dox
index c81ac6e..d754b96 100644
--- a/doxygen/dox/high_level/extension.dox
+++ b/doxygen/dox/high_level/extension.dox
@@ -1,60 +1,51 @@
/** \defgroup H5LR Extensions
*
- * <em>Working with region references, hyperslab selections,
+ * <em>Working with region references, hyperslab selections,
* and bit-fields (H5LR, H5LT)</em>
*
- * The following reference manual entries describe high-level HDF5 C and Fortran APIs
- * for working with region references, hyperslab selections, and bit-fields.
- * These functions were created as part of a project supporting
+ * The following reference manual entries describe high-level HDF5 C and Fortran APIs
+ * for working with region references, hyperslab selections, and bit-fields.
+ * These functions were created as part of a project supporting
* NPP/NPOESS Data Production and Exploitation (
* <a href="https://support.hdfgroup.org/projects/jpss/documentation">
- * project </a>,
- * <a href="https://gamma.hdfgroup.org/ftp/pub/outgoing/NPOESS/source">
- * software </a>).
- * While they were written to facilitate access to NPP, NPOESS, and JPSS
- * data in the HDF5 format, these functions may be useful to anyone working
+ * project</a>, <a href="https://gamma.hdfgroup.org/ftp/pub/outgoing/NPOESS/source">
+ * software </a>).
+ * While they were written to facilitate access to NPP, NPOESS, and JPSS
+ * data in the HDF5 format, these functions may be useful to anyone working
* with region references, hyperslab selections, or bit-fields.
*
* Note that these functions are not part of the standard HDF5 distribution;
- * the
- * <a href="https://gamma.hdfgroup.org/ftp/pub/outgoing/NPOESS/source">
- * software </a>
+ * the <a href="https://gamma.hdfgroup.org/ftp/pub/outgoing/NPOESS/source">
+ * software </a>
* must be separately downloaded and installed.
*
- * A comprehensive guide to this library,
- * <a href="https://support.hdfgroup.org/projects/jpss/documentation/HL/UG/NPOESS_HL-UG.pdf">
+ * A comprehensive guide to this library,
+ * <a href="https://support.hdfgroup.org/projects/jpss/documentation/HL/UG/NPOESS_HL-UG.pdf">
* <em>User Guide to the HDF5 High-level Library for Handling Region References and Hyperslab Selections</em></a>
- * is available at
+ * is available at
* https://support.hdfgroup.org/projects/jpss/documentation/HL/UG/NPOESS_HL-UG.pdf.
*
* - \ref H5LRcopy_reference
- * \n Copies data from the specified dataset to a new location and
- * creates a reference to it.
+ * \n Copies data from the specified dataset to a new location and creates a reference to it.
* - \ref H5LRcopy_region
- * \n Copies data from a referenced region to a region in a
- * destination dataset.
+ * \n Copies data from a referenced region to a region in a destination dataset.
* - \ref H5LRcreate_ref_to_all
- * \n Creates a dataset with the region references to the data in all
- * datasets located under a specified group in a file or creates a
- * dataset with object references to all objects (groups or datasets)
+ * \n Creates a dataset with the region references to the data in all datasets located under a
+ * specified group in a file or creates a dataset with object references to all objects (groups or datasets)
* located under a specified group in a file.
* - \ref H5LRcreate_region_references
- * \n Creates an array of region references using an array of paths to
+ * \n Creates an array of region references using an array of paths to
* datasets and an array of corresponding hyperslab descriptions.
* - \ref H5LRget_region_info
* \n Retrieves information about the data a region reference points to.
* - \ref H5LRmake_dataset
- * \n Creates and writes a dataset containing a list of
- * region references.
+ * \n Creates and writes a dataset containing a list of region references.
* - \ref H5LRread_region
- * \n Retrieves raw data pointed to by a region reference to
- * an application buffer.
+ * \n Retrieves raw data pointed to by a region reference to an application buffer.
* - \ref H5LTcopy_region
- * \n Copies data from a specified region in a source dataset
- * to a specified region in a destination dataset.
+ * \n Copies data from a specified region in a source dataset to a specified region in a destination dataset.
* - \ref H5LTread_bitfield_value
- * \n Retrieves the values of quality flags for each element
- * to the application provided buffer.
+ * \n Retrieves the values of quality flags for each element to the application provided buffer.
* - \ref H5LTread_region
* \n Reads selected data to an application buffer.
*
@@ -77,24 +68,24 @@
* \param[in] path Path to the dataset being created
* \param[in] type_id Datatype of the dataset
* \param[in] buf_size Size of the \p loc_id_ref and \p buf arrays
- * \param[in] loc_id_ref Array of object identifiers; each identifier
- * describes to which HDF5 file the corresponding
+ * \param[in] loc_id_ref Array of object identifiers; each identifier
+ * describes to which HDF5 file the corresponding
* region reference belongs to
* \param[in] buf Array of region references
*
* \return \herr_t
*
- * \details Given an array of size \p buf_size of region references \p buf,
- * the function will create a dataset with path \p path, at location
- * specified by \p loc_id and of a datatype specified by \p type_id,
- * and will write data associated with each region reference in the order
- * corresponding to the order of the region references in the buffer.
- * It is assumed that all referenced hyperslabs have the same dimensionality,
- * and only the size of the slowest changing dimension may differ.
- * Each reference in the \p buf array belongs to the file identified
+ * \details Given an array of size \p buf_size of region references \p buf,
+ * the function will create a dataset with path \p path, at location
+ * specified by \p loc_id and of a datatype specified by \p type_id,
+ * and will write data associated with each region reference in the order
+ * corresponding to the order of the region references in the buffer.
+ * It is assumed that all referenced hyperslabs have the same dimensionality,
+ * and only the size of the slowest changing dimension may differ.
+ * Each reference in the \p buf array belongs to the file identified
* by the corresponding object identifiers in the array \p loc_id_ref.
*
- * If \p path does not exist in \p loc_id then the function will
+ * If \p path does not exist in \p loc_id then the function will
* create the path specified by \p path automatically.
*
* \version 1.1 Fortran wrapper introduced in this release.
@@ -103,10 +94,10 @@
*
*/
H5_HLRDLL herr_t H5LRmake_dataset(hid_t loc_id,
- const char *path,
- hid_t type_id, const size_t buf_size,
- const hid_t *loc_id_ref,
- const hdset_reg_ref_t *buf);
+ const char *path,
+ hid_t type_id, const size_t buf_size,
+ const hid_t *loc_id_ref,
+ const hdset_reg_ref_t *buf);
/*-------------------------------------------------------------------------
*
@@ -119,49 +110,46 @@ H5_HLRDLL herr_t H5LRmake_dataset(hid_t loc_id,
* --------------------------------------------------------------------------
* \ingroup H5LR
*
- * \brief Creates an array of region references using an array of paths to
+ * \brief Creates an array of region references using an array of paths to
* datasets and an array of corresponding hyperslab descriptions.
*
* \param[in] obj_id File identifier for the HDF5 file containing
* the referenced regions or an object identifier
* for any object in that file
- * \param[in] num_elem Number of elements in the \p path and
- * \p buf arrays
- * \param[in] path Array of pointers to strings, which contain
- * the paths to the target datasets for the
- * region references
+ * \param[in] num_elem Number of elements in the \p path and \p buf arrays
+ * \param[in] path Array of pointers to strings, which contain
+ * the paths to the target datasets for the region references
* \param[in] block_coord Array of hyperslab coordinate
- * \param[out] buf Buffer for returning an array of region
- * references
+ * \param[out] buf Buffer for returning an array of region references
*
* \return \herr_t
*
* \note **Motivation:**
- * \note H5LRcreate_region_references() is useful when creating
+ * \note H5LRcreate_region_references() is useful when creating
* large numbers of similar region references.
*
- * \details H5LRcreate_region_references() creates a list of region references
- * given an array of paths to datasets and another array listing the
+ * \details H5LRcreate_region_references() creates a list of region references
+ * given an array of paths to datasets and another array listing the
* corner coordinates of the corresponding hyperslabs.
*
* \p path parameter is an array of pointers to strings.
*
- * \p num_elem specifies the number of region references to be created,
+ * \p num_elem specifies the number of region references to be created,
* thus specifying the size of the \p path and \p _buf arrays.
*
- * Buffer \p block_coord has size 2*rank and is the coordinates of the
- * starting point following by the coordinates of the ending point of
- * the hyperslab, repeated \p num_elem times for each hyperslab.
- * For example, creating two region references to two hyperslabs,
- * one with a rectangular hyperslab region starting at element (2,2)
- * to element (5,4) and the second rectangular region starting at
- * element (7,7) to element (9,10), results in \p block_coord
+ * Buffer \p block_coord has size 2*rank and is the coordinates of the
+ * starting point following by the coordinates of the ending point of
+ * the hyperslab, repeated \p num_elem times for each hyperslab.
+ * For example, creating two region references to two hyperslabs,
+ * one with a rectangular hyperslab region starting at element (2,2)
+ * to element (5,4) and the second rectangular region starting at
+ * element (7,7) to element (9,10), results in \p block_coord
* being {2,2,5,4, 7,7,9,10}.
*
- * The rank of the hyperslab will be the same as the rank of the
- * target dataset. H5LRcreate_region_references() will retrieve
- * the rank for each dataset and will use those values to interpret
- * the values in the buffer. Please note that rank may vary from one
+ * The rank of the hyperslab will be the same as the rank of the
+ * target dataset. H5LRcreate_region_references() will retrieve
+ * the rank for each dataset and will use those values to interpret
+ * the values in the buffer. Please note that rank may vary from one
* dataset to another.
*
* \version 1.1 Fortran wrapper introduced in this release.
@@ -170,43 +158,39 @@ H5_HLRDLL herr_t H5LRmake_dataset(hid_t loc_id,
*
*/
H5_HLRDLL herr_t H5LRcreate_region_references(hid_t obj_id,
- size_t num_elem,
- const char **path,
- const hsize_t *block_coord,
- hdset_reg_ref_t *buf);
+ size_t num_elem,
+ const char **path,
+ const hsize_t *block_coord,
+ hdset_reg_ref_t *buf);
/**
* --------------------------------------------------------------------------
* \ingroup H5LR
*
- * \brief Copies data from the specified dataset to a new location and
- * creates a reference to it.
+ * \brief Copies data from the specified dataset to a new location and creates a reference to it.
*
- * \param[in] obj_id Identifier of any object in a file an
- * HDF5 reference belongs to
+ * \param[in] obj_id Identifier of any object in a file an HDF5 reference belongs to
* \param[in] ref Reference to the datasets region
- * \param[in] file Name of the destination file
+ * \param[in] file Name of the destination file
* \param[in] path Full path to the destination dataset
- * \param[in] block_coord Hyperslab coordinates in the destination
- * dataset
- * \param[out] ref_new Region reference to the new location of
- * data
+ * \param[in] block_coord Hyperslab coordinates in the destination dataset
+ * \param[out] ref_new Region reference to the new location of data
*
* \return \herr_t
*
- * \details Given a data set pointed to by a region reference, the function
- * H5LRcopy_reference() will copy the hyperslab data referenced by
- * a datasets region reference into existing dataset specified by
- * its path \p path in the file with the name \p file, and to location
- * specified by the hyperslab coordinates \p block_coord. It will
- * create the region reference \p ref_new to point to the new location.
- * The number of elements in the old and newly specified regions has
+ * \details Given a data set pointed to by a region reference, the function
+ * H5LRcopy_reference() will copy the hyperslab data referenced by
+ * a datasets region reference into existing dataset specified by
+ * its path \p path in the file with the name \p file, and to location
+ * specified by the hyperslab coordinates \p block_coord. It will
+ * create the region reference \p ref_new to point to the new location.
+ * The number of elements in the old and newly specified regions has
* to be the same.
*
- * Buffer \p block_coord has size 2*rank and is the coordinates of
- * the starting point following by the coordinates of the ending
- * point of the hyperslab. For example, to extract a rectangular
- * hyperslab region starting at element (2,2) to element (5,4)
+ * Buffer \p block_coord has size 2*rank and is the coordinates of
+ * the starting point following by the coordinates of the ending
+ * point of the hyperslab. For example, to extract a rectangular
+ * hyperslab region starting at element (2,2) to element (5,4)
* then \p block_coord would be {2, 2, 5, 4}.
*
* \version 1.1 Fortran wrapper introduced in this release.
@@ -215,41 +199,39 @@ H5_HLRDLL herr_t H5LRcreate_region_references(hid_t obj_id,
*
*/
H5_HLRDLL herr_t H5LRcopy_reference(hid_t obj_id, hdset_reg_ref_t *ref, const char *file,
- const char *path, const hsize_t *block_coord,
- hdset_reg_ref_t *ref_new);
+ const char *path, const hsize_t *block_coord,
+ hdset_reg_ref_t *ref_new);
/**
* --------------------------------------------------------------------------
* \ingroup H5LR
*
- * \brief Copies data from a referenced region to a region in a
- * destination dataset.
+ * \brief Copies data from a referenced region to a region in a destination dataset.
*
- * \param[in] obj_id Identifier of any object in a file
+ * \param[in] obj_id Identifier of any object in a file
* dataset region reference belongs to
* \param[in] ref Dataset region reference
- * \param[in] file Name of the destination file
+ * \param[in] file Name of the destination file
* \param[in] path Full path to the destination dataset
- * \param[in] block_coord Hyperslab coordinates in the destination
- * dataset
+ * \param[in] block_coord Hyperslab coordinates in the destination dataset
*
* \return \herr_t
*
- * \details Given a dataset region reference \p ref in a source file
- * specified by an identifier of any object in that file
- * \p obj_id, the function will write data to the existing
- * dataset \p path in file \p file to the simple hyperslab
+ * \details Given a dataset region reference \p ref in a source file
+ * specified by an identifier of any object in that file
+ * \p obj_id, the function will write data to the existing
+ * dataset \p path in file \p file to the simple hyperslab
* specified by \p block_coord.
*
- * Buffer \p block_coord has size 2*rank and is the coordinates
- * of the starting point following by the coordinates of the
- * ending point of the hyperslab. For example, to specify a
- * rectangular hyperslab destination region starting at element
+ * Buffer \p block_coord has size 2*rank and is the coordinates
+ * of the starting point following by the coordinates of the
+ * ending point of the hyperslab. For example, to specify a
+ * rectangular hyperslab destination region starting at element
* (2,2) to element (5,4) then \p block_coord would be {2, 2, 5, 4}.
*
- * If \p path does not exist in the destination file (as may be
- * the case when writing to a new file) then the dataset will be
- * copied directly to the \p path and \p block_coord will be
+ * If \p path does not exist in the destination file (as may be
+ * the case when writing to a new file) then the dataset will be
+ * copied directly to the \p path and \p block_coord will be
* disregarded.
*
* \version 1.1 Fortran wrapper introduced in this release.
@@ -258,71 +240,66 @@ H5_HLRDLL herr_t H5LRcopy_reference(hid_t obj_id, hdset_reg_ref_t *ref, const ch
*
*/
H5_HLRDLL herr_t H5LRcopy_region(hid_t obj_id,
- hdset_reg_ref_t *ref,
- const char *file,
- const char *path,
- const hsize_t *block_coord);
+ hdset_reg_ref_t *ref,
+ const char *file,
+ const char *path,
+ const hsize_t *block_coord);
/**
* --------------------------------------------------------------------------
* \ingroup H5LR
*
- * \brief Creates a dataset with the region references to the data
- * in all datasets located under a specified group in a file
- * or creates a dataset with object references to all objects
+ * \brief Creates a dataset with the region references to the data
+ * in all datasets located under a specified group in a file
+ * or creates a dataset with object references to all objects
* (groups or datasets) located under a specified group in a file.
*
* \fg_loc_id
- * \param[in] group_path Absolute or relative path to the group
- * at which traversal starts
- * \param[in] ds_path Absolute or relative path to the dataset
- * with region references to be created
- * \param[in] index_type Index_type;
- * see valid values below in description
- * \param[in] order Order in which index is traversed;
- * see valid values below in description
- * \param[in] ref_type Reference type;
- * see valid values below in description
+ * \param[in] group_path Absolute or relative path to the group at which traversal starts
+ * \param[in] ds_path Absolute or relative path to the dataset with region references to be created
+ * \param[in] index_type Index_type; see valid values below in description
+ * \param[in] order Order in which index is traversed; see valid values below in description
+ * \param[in] ref_type Reference type; see valid values below in description
*
* \return \herr_t
*
- * \details H5LRcreate_ref_to_all() creates a dataset with the
- * region references to the data in all datasets located
- * under a specified group in a file or creates a dataset with
- * object references to all objects (groups or datasets) located
+ * \details H5LRcreate_ref_to_all() creates a dataset with the
+ * region references to the data in all datasets located
+ * under a specified group in a file or creates a dataset with
+ * object references to all objects (groups or datasets) located
* under a specified group in a file.
*
- * Given a dataset path \p ds_path in a file specified by the
- * \p loc_id identifier, the function H5LRcreate_ref_to_all()
- * will create a contiguous one-dimensional dataset with the
- * region references or object references depending on the value
- * of the \p ref_type parameter. When \p ref_type is
- * #H5R_DATASET_REGION, each region reference points to all data
- * in a dataset encountered by an internally called H5Lvisit()
- * routine, which starts at the group specified by the \p loc_id
+ * Given a dataset path \p ds_path in a file specified by the
+ * \p loc_id identifier, the function H5LRcreate_ref_to_all()
+ * will create a contiguous one-dimensional dataset with the
+ * region references or object references depending on the value
+ * of the \p ref_type parameter. When \p ref_type is
+ * #H5R_DATASET_REGION, each region reference points to all data
+ * in a dataset encountered by an internally called H5Lvisit()
+ * routine, which starts at the group specified by the \p loc_id
* and \p group_path parameters. In a like manner, when
- * \p ref_type is #H5R_OBJECT, each object reference points to
+ * \p ref_type is #H5R_OBJECT, each object reference points to
* an object (a group or a dataset) encountered by H5Lvisit().
*
- * If \p ds_path does not exist in \p loc_id then the function
+ * If \p ds_path does not exist in \p loc_id then the function
* will create the path specified by \p ds_path automatically.
*
- * \p index_type specifies the index to be used.
+ * \p index_type specifies the index to be used.
* Valid values include the following:
* - #H5_INDEX_NAME Alphanumeric index on name
* - #H5_INDEX_CRT_ORDER Index on creation order
*
- * \p order specifies the order in which objects are to be
- * inspected along the index specified in \p index_type.
+ * \p order specifies the order in which objects are to be
+ * inspected along the index specified in \p index_type.
* Valid values include the following:
* - #H5_ITER_INC Increasing order
* - #H5_ITER_DEC Decreasing order
* - #H5_ITER_NATIVE Fastest available order
*
- * For more detailed information on these two parameters,
- * see H5Lvisit().
+ * For more detailed information on these two parameters,
+ * @see H5Lvisit().
*
- * \p ref_type specifies the type of the reference to be used.
+ * \p ref_type specifies the type of the reference to be used.
* Valid values include the following:
* - #H5R_DATASET_REGION Dataset region reference
* - #H5R_OBJECT Object reference
@@ -333,7 +310,7 @@ H5_HLRDLL herr_t H5LRcopy_region(hid_t obj_id,
*
*/
H5_HLRDLL herr_t H5LRcreate_ref_to_all(hid_t loc_id, const char *group_path,
- const char *ds_path, H5_index_t index_type, H5_iter_order_t order, H5R_type_t ref_type);
+ const char *ds_path, H5_index_t index_type, H5_iter_order_t order, H5R_type_t ref_type);
/*-------------------------------------------------------------------------
*
@@ -352,30 +329,27 @@ H5_HLRDLL herr_t H5LRcreate_ref_to_all(hid_t loc_id, const char *group_path,
* \param[in] obj_id File identifier for the HDF5 file containing
* the dataset with the referenced region or an
* object identifier for any object in that file
- * \param[in] ref Region reference specifying data to be read
- * in
- * \param[in] mem_type Memory datatype of data read from referenced
+ * \param[in] ref Region reference specifying data to be read in
+ * \param[in] mem_type Memory datatype of data read from referenced
* region into the application buffer
- * \param[in,out] numelem Number of elements to be read into buffer
- * \p buf
- * \param[out] buf Buffer in which data is returned to the
- * application
+ * \param[in,out] numelem Number of elements to be read into buffer \p buf
+ * \param[out] buf Buffer in which data is returned to the application
*
* \return \herr_t
*
- * \details H5LRread_region() reads data pointed to by the region
+ * \details H5LRread_region() reads data pointed to by the region
* reference \p ref into the buffer \p buf.
*
- * \p numelem specifies the number of elements to be read
- * into \p buf. When the size of the reference region is unknown,
- * H5LRread_region() can be called with \p buf set to NULL;
- * the number of elements in the referenced region will be returned
+ * \p numelem specifies the number of elements to be read
+ * into \p buf. When the size of the reference region is unknown,
+ * H5LRread_region() can be called with \p buf set to NULL;
+ * the number of elements in the referenced region will be returned
* in \p numelem.
*
- * The buffer buf must be big enough to hold \p numelem elements
- * of type \p mem_type. For example, if data is read from the referenced
- * region into an integer buffer, \p mem_type should be #H5T_NATIVE_INT
- * and the buffer must be at least \c sizeof(int) * \p numelem bytes
+ *          The buffer \p buf must be big enough to hold \p numelem elements
+ * of type \p mem_type. For example, if data is read from the referenced
+ * region into an integer buffer, \p mem_type should be #H5T_NATIVE_INT
+ * and the buffer must be at least \c sizeof(int) * \p numelem bytes
* in size. This buffer must be allocated by the application.
*
* \version 1.1 Fortran wrapper introduced in this release.
@@ -384,10 +358,10 @@ H5_HLRDLL herr_t H5LRcreate_ref_to_all(hid_t loc_id, const char *group_path,
*
*/
H5_HLRDLL herr_t H5LRread_region(hid_t obj_id,
- const hdset_reg_ref_t *ref,
- hid_t mem_type,
- size_t *numelem,
- void *buf );
+ const hdset_reg_ref_t *ref,
+ hid_t mem_type,
+ size_t *numelem,
+ void *buf );
/*-------------------------------------------------------------------------
*
@@ -400,40 +374,33 @@ H5_HLRDLL herr_t H5LRread_region(hid_t obj_id,
* --------------------------------------------------------------------------
* \ingroup H5LR
*
- * \brief Retrieves information about the data a region reference
- * points to.
+ * \brief Retrieves information about the data a region reference points to.
*
- * \param[in] obj_id Identifier of any object in an HDF5 file
- * the region reference belongs to.
+ * \param[in] obj_id Identifier of any object in an HDF5 file the region reference belongs to.
* \param[in] ref Region reference to query
- * \param[in,out] len Size of the buffer to store \p path in.
- * NOTE: if \p *path is not NULL then \p *len
- * must be the appropriate length
+ * \param[in,out] len Size of the buffer to store \p path in.
+ * NOTE: if \p *path is not NULL then \p *len must be the appropriate length
* \param[out] path Full path that a region reference points to
* \param[out] rank The number of dimensions of the dataset
- * dimensions of the dataset pointed by
- * region reference.
- * \param[out] dtype Datatype of the dataset pointed by the
- * region reference.
+ *                        pointed to by the region reference.
+ * \param[out] dtype Datatype of the dataset pointed by the region reference.
* \param[out] sel_type Type of the selection (point or hyperslab)
- * \param[in,out] numelem Number of coordinate blocks or
- * selected elements.
- * \param[out] buf Buffer containing description of the region
- * pointed by region reference
+ * \param[in,out] numelem Number of coordinate blocks or selected elements.
+ * \param[out] buf Buffer containing description of the region pointed by region reference
*
* \return \herr_t
*
- * \details H5LRget_region_info() queries information about the data
- * pointed by a region reference \p ref. It returns one of the
- * absolute paths to a dataset, length of the path, dataset’s rank
- * and datatype, description of the referenced region and type of
- * the referenced region. Any output argument can be NULL if that
+ * \details H5LRget_region_info() queries information about the data
+ * pointed by a region reference \p ref. It returns one of the
+ * absolute paths to a dataset, length of the path, dataset’s rank
+ * and datatype, description of the referenced region and type of
+ * the referenced region. Any output argument can be NULL if that
* argument does not need to be returned.
*
- * The parameter \p obj_id is an identifier for any object in the
- * HDF5 file containing the referenced object. For example, it can
- * be an identifier of a dataset the region reference belongs to
- * or an identifier of an HDF5 file the dataset with region references
+ * The parameter \p obj_id is an identifier for any object in the
+ * HDF5 file containing the referenced object. For example, it can
+ * be an identifier of a dataset the region reference belongs to
+ * or an identifier of an HDF5 file the dataset with region references
* is stored in.
*
* The parameter \p ref is a region reference to query.
@@ -442,36 +409,36 @@ H5_HLRDLL herr_t H5LRread_region(hid_t obj_id,
* buffer of size \p len+1 to return an absolute path to a dataset
* the region reference points to.
*
- * The parameter \p len is a length of absolute path string plus
- * the \0 string terminator. If path parameter is NULL, actual
- * length of the path (+1 for \0 string terminator) is returned to
- * application and can be used to allocate buffer path of an
+ * The parameter \p len is a length of absolute path string plus
+ * the \0 string terminator. If path parameter is NULL, actual
+ * length of the path (+1 for \0 string terminator) is returned to
+ * application and can be used to allocate buffer path of an
* appropriate length \p len.
*
* The parameter \p sel_type describes the type of the selected
- * region. Possible values can be #H5S_SEL_POINTS for point
+ * region. Possible values can be #H5S_SEL_POINTS for point
* selection and #H5S_SEL_HYPERSLABS for hyperslab selection.
*
- * The parameter \p numelem describes how many elements will be
- * placed in the buffer \p buf. The number should be interpreted
+ * The parameter \p numelem describes how many elements will be
+ * placed in the buffer \p buf. The number should be interpreted
* using the value of \p sel_type.
*
- * If value of \p sel_type is #H5S_SEL_HYPERSLABS, the parameter
- * \p buf contains \p numelem blocks of the coordinates for each
- * simple hyperslab of the referenced region. Each block has
- * length \c 2*\p rank and is organized as follows: <"start" coordinate>,
- * immediately followed by <"opposite" corner coordinate>.
- * The total size of the buffer to hold the description of the
- * region will be \c 2*\p rank*\p numelem. If region reference
- * points to a contiguous sub-array, then the value of \p numelem
- * is 1 and the block contains coordinates of the upper left and
+ * If value of \p sel_type is #H5S_SEL_HYPERSLABS, the parameter
+ * \p buf contains \p numelem blocks of the coordinates for each
+ * simple hyperslab of the referenced region. Each block has
+ * length \c 2*\p rank and is organized as follows: <"start" coordinate>,
+ * immediately followed by <"opposite" corner coordinate>.
+ * The total size of the buffer to hold the description of the
+ * region will be \c 2*\p rank*\p numelem. If region reference
+ * points to a contiguous sub-array, then the value of \p numelem
+ * is 1 and the block contains coordinates of the upper left and
* lower right corners of the sub-array (or simple hyperslab).
*
- * If value of \p sel_type is #H5S_SEL_POINTS, the parameter \p buf
- * contains \p numelem blocks of the coordinates for each selected
- * point of the referenced region. Each block has length \p rank
- * and contains coordinates of the element. The total size of the
- * buffer to hold the description of the region will be
+ * If value of \p sel_type is #H5S_SEL_POINTS, the parameter \p buf
+ * contains \p numelem blocks of the coordinates for each selected
+ * point of the referenced region. Each block has length \p rank
+ * and contains coordinates of the element. The total size of the
+ * buffer to hold the description of the region will be
 *          \p rank * \p numelem.
*
*
@@ -481,14 +448,14 @@ H5_HLRDLL herr_t H5LRread_region(hid_t obj_id,
*
*/
H5_HLRDLL herr_t H5LRget_region_info(hid_t obj_id,
- const hdset_reg_ref_t *ref,
- size_t *len,
- char *path,
- int *rank,
- hid_t *dtype,
- H5S_sel_type *sel_type,
- size_t *numelem,
- hsize_t *buf );
+ const hdset_reg_ref_t *ref,
+ size_t *len,
+ char *path,
+ int *rank,
+ hid_t *dtype,
+ H5S_sel_type *sel_type,
+ size_t *numelem,
+ hsize_t *buf );
@@ -503,35 +470,33 @@ H5_HLRDLL herr_t H5LRget_region_info(hid_t obj_id,
* --------------------------------------------------------------------------
* \ingroup H5LR
*
- * \brief Copies data from a specified region in a source dataset
+ * \brief Copies data from a specified region in a source dataset
* to a specified region in a destination dataset
*
* \param[in] file_src Name of the source file
* \param[in] path_src Full path to the source dataset
- * \param[in] block_coord_src Hyperslab coordinates in the
- * source dataset
+ * \param[in] block_coord_src Hyperslab coordinates in the source dataset
* \param[in] file_dest Name of the destination file
* \param[in] path_dest Full path to the destination dataset
- * \param[in] block_coord_dset Hyperslab coordinates in the
- * destination dataset
+ * \param[in] block_coord_dset Hyperslab coordinates in the destination dataset
*
* \return \herr_t
*
- * \details Given a path to a dataset \p path_src in a file with the
- * name \p file_src, and description of a simple hyperslab of
- * the source \p block_coord_src, the function will write data
- * to the dataset \p path_dest in file \p file_dest to the
- * simple hyperslab specified by \p block_coord_dset.
- * The arrays \p block_coord_src and \p block_coord_dset have
- * a length of 2*rank and are the coordinates of the starting
- * point following by the coordinates of the ending point of the
- * hyperslab. For example, to specify a rectangular hyperslab
- * destination region starting at element (2,2) to element (5,4)
+ * \details Given a path to a dataset \p path_src in a file with the
+ * name \p file_src, and description of a simple hyperslab of
+ * the source \p block_coord_src, the function will write data
+ * to the dataset \p path_dest in file \p file_dest to the
+ * simple hyperslab specified by \p block_coord_dset.
+ * The arrays \p block_coord_src and \p block_coord_dset have
+ * a length of 2*rank and are the coordinates of the starting
+ *          point followed by the coordinates of the ending point of the
+ * hyperslab. For example, to specify a rectangular hyperslab
+ * destination region starting at element (2,2) to element (5,4)
* then \p block_coord_dset would be {2, 2, 5, 4}.
*
- * If \p path_dest does not exist in the destination file
- * (as may be the case when writing to a new file) then the
- * dataset will be copied directly to the \p path_dest and
+ * If \p path_dest does not exist in the destination file
+ * (as may be the case when writing to a new file) then the
+ * dataset will be copied directly to the \p path_dest and
* \p block_coord_dset will be disregarded.
*
* \version 1.1 Fortran wrapper introduced in this release.
@@ -540,11 +505,11 @@ H5_HLRDLL herr_t H5LRget_region_info(hid_t obj_id,
*
*/
H5_HLRDLL herr_t H5LTcopy_region(const char *file_src,
- const char *path_src,
- const hsize_t *block_coord_src,
- const char *file_dest,
- const char *path_dest,
- const hsize_t *block_coord_dset);
+ const char *path_src,
+ const hsize_t *block_coord_src,
+ const char *file_dest,
+ const char *path_dest,
+ const hsize_t *block_coord_dset);
/*-------------------------------------------------------------------------
*
@@ -562,27 +527,25 @@ H5_HLRDLL herr_t H5LTcopy_region(const char *file_src,
* \param[in] file Name of file
* \param[in] path Full path to a dataset
* \param[in] block_coord Hyperslab coordinates
- * \param[in] mem_type Memory datatype, describing the buffer
- * the referenced data will be read into
- * \param[out] buf Buffer containing data from the
- * referenced region
+ * \param[in] mem_type Memory datatype, describing the buffer the referenced data will be read into
+ * \param[out] buf Buffer containing data from the referenced region
*
* \return \herr_t
*
- * \details H5LTread_region() reads data from a region described by
- * the hyperslab coordinates in \p block_coord, located in
- * the dataset specified by its absolute path \p path in a
- * file specified by its name \p file. Data is read into a
- * buffer \p buf of the datatype that corresponds to the
+ * \details H5LTread_region() reads data from a region described by
+ * the hyperslab coordinates in \p block_coord, located in
+ * the dataset specified by its absolute path \p path in a
+ * file specified by its name \p file. Data is read into a
+ * buffer \p buf of the datatype that corresponds to the
* HDF5 datatype specified by \p mem_type.
*
- * Buffer \p block_coord has size 2*rank and is the coordinates
+ *          of the starting point followed by the coordinates of the
- * ending point of the hyperslab. For example, to extract a
- * rectangular hyperslab region starting at element (2,2) to
+ * Buffer \p block_coord has size 2*rank and is the coordinates
+ * of the starting point following by the coordinates of the
+ * ending point of the hyperslab. For example, to extract a
+ * rectangular hyperslab region starting at element (2,2) to
* element (5,4) then \p block_coord would be {2, 2, 5, 4}.
*
- * Buffer \p buf should be big enough to hold selected elements
+ * Buffer \p buf should be big enough to hold selected elements
* of the type that corresponds to the \p mem_type
*
* \version 1.1 Fortran wrapper introduced in this release.
@@ -591,57 +554,55 @@ H5_HLRDLL herr_t H5LTcopy_region(const char *file_src,
*
*/
H5_HLRDLL herr_t H5LTread_region(const char *file,
- const char *path,
- const hsize_t *block_coord,
- hid_t mem_type,
- void *buf );
+ const char *path,
+ const hsize_t *block_coord,
+ hid_t mem_type,
+ void *buf );
/**
* --------------------------------------------------------------------------
* \ingroup H5LR
*
- * \brief Retrieves the values of quality flags for each element
+ * \brief Retrieves the values of quality flags for each element
* to the application provided buffer.
*
* \param[in] dset_id Identifier of the dataset with bit-field values
* \param[in] num_values Number of the values to be extracted
- * \param[in] offset Array of staring bits to be extracted from
- * \param[in] offset      Array of starting bits to be extracted from
* the element; valid values: 0 (zero) through 7
- * \param[in] lengths Array of the number of bits to be extracted
- * for each value
- * \param[in] space Dataspace identifier, describing the elements
- * to be read from the dataset with bit-field
- * values
+ * \param[in] lengths Array of the number of bits to be extracted for each value
+ * \param[in] space Dataspace identifier, describing the elements
+ * to be read from the dataset with bit-field values
* \param[out] buf Buffer to read the values in
*
* \return \herr_t
*
- * \details H5LTread_bitfield_value() reads selected elements from a
- * dataset specified by its identifier \p dset_id, and unpacks
+ * \details H5LTread_bitfield_value() reads selected elements from a
+ * dataset specified by its identifier \p dset_id, and unpacks
* the bit-field values to a buffer \p buf.
*
- * The parameter \p space is a space identifier that indicates
+ * The parameter \p space is a space identifier that indicates
* which elements of the dataset should be read.
*
- * The parameter \p offset is an array of length \p num_values;
+ * The parameter \p offset is an array of length \p num_values;
* the i<sup>th</sup> element of the array holds the value of the
- * starting bit of the i<sup>th</sup> bit-field value.
+ * starting bit of the i<sup>th</sup> bit-field value.
* Valid values are: 0 (zero) through 7.
*
- * The parameter \p lengths is an array of length \p num_values;
- * the i<sup>th</sup> element of the array holds the number of
- * bits to be extracted for the i<sup>th</sup> bit-field value.
- * Extracted bits will be interpreted as a base-2 integer value.
- * Each value will be converted to the base-10 integer value and
- * stored in the application buffer.
- *
- * Buffer \p buf is allocated by the application and should be big
- * enough to hold \c num_sel_elem * \p num_values elements of the
- * specified type, where \c num_sel_elem is a number of the elements
- * to be read from the dataset. Data in the buffer is organized
- * as \p num_values values for the first element, followed by the
- * \p num_values values for the second element, ... , followed by
- * the \p num_values values for the
+ * The parameter \p lengths is an array of length \p num_values;
+ * the i<sup>th</sup> element of the array holds the number of
+ * bits to be extracted for the i<sup>th</sup> bit-field value.
+ * Extracted bits will be interpreted as a base-2 integer value.
+ * Each value will be converted to the base-10 integer value and
+ * stored in the application buffer.
+ *
+ * Buffer \p buf is allocated by the application and should be big
+ * enough to hold \c num_sel_elem * \p num_values elements of the
+ * specified type, where \c num_sel_elem is a number of the elements
+ * to be read from the dataset. Data in the buffer is organized
+ * as \p num_values values for the first element, followed by the
+ * \p num_values values for the second element, ... , followed by
+ * the \p num_values values for the
* \c num_selected_elem<sup>th</sup> element.
*
* \version 1.1 Fortran wrapper introduced in this release.
@@ -650,5 +611,5 @@ H5_HLRDLL herr_t H5LTread_region(const char *file,
*
*/
H5_HLRDLL herr_t H5LTread_bitfield_value(hid_t dset_id, int num_values, const unsigned *offset,
- const unsigned *lengths, hid_t space, int *buf);
+ const unsigned *lengths, hid_t space, int *buf);
diff --git a/doxygen/dox/high_level/high_level.dox b/doxygen/dox/high_level/high_level.dox
deleted file mode 100644
index c53d298..0000000
--- a/doxygen/dox/high_level/high_level.dox
+++ /dev/null
@@ -1,29 +0,0 @@
-/** \page high_level High-level library
- * The high-level HDF5 library includes several sets of convenience and standard-use APIs to
- * facilitate common HDF5 operations.
- *
- * <ul>
- * <li>\ref H5LT "Lite (H5LT, H5LD)"
- * \n
- * Functions to simplify creating and manipulating datasets, attributes and other features
- * <li>\ref H5IM "Image (H5IM)"
- * \n
- * Creating and manipulating HDF5 datasets intended to be interpreted as images
- * <li>\ref H5TB "Table (H5TB)"
- * \n
- * Creating and manipulating HDF5 datasets intended to be interpreted as tables
- * <li>\ref H5PT "Packet Table (H5PT)"
- * \n
- * Creating and manipulating HDF5 datasets to support append- and read-only operations on table data
- * <li>\ref H5DS "Dimension Scale (H5DS)"
- * \n
- * Creating and manipulating HDF5 datasets that are associated with the dimension of another HDF5 dataset
- * <li>\ref H5DO "Optimizations (H5DO)"
- * \n
- * Bypassing default HDF5 behavior in order to optimize for specific use cases
- * <li>\ref H5LR "Extensions (H5LR, H5LT)"
- * \n
- * Working with region references, hyperslab selections, and bit-fields
- * </ul>
- *
- */
diff --git a/doxygen/dox/rm-template.dox b/doxygen/dox/rm-template.dox
index bd81f64..1e9f2d7 100644
--- a/doxygen/dox/rm-template.dox
+++ b/doxygen/dox/rm-template.dox
@@ -96,4 +96,4 @@ the <a href="https://www.oreilly.com/library/view/97-things-every/9780596809515/
* \version 1.MAJOR.MINOR Function was deprecated in this release
\endverbatim
-*/ \ No newline at end of file
+*/