From 73ef4b96dba12eb728d28a5e17a8b8c50e7d95f1 Mon Sep 17 00:00:00 2001 From: Frank Baker Date: Wed, 7 Jun 2000 14:56:04 -0500 Subject: [svn-r2361] Updating "HDF5 Tutorial." --- doc/html/Tutor/Contents.html | 13 +- doc/html/Tutor/ContentsAdd.html | 71 +-- doc/html/Tutor/ContentsAdv.html | 77 +-- doc/html/Tutor/ContentsFull.html | 44 +- doc/html/Tutor/ContentsIntro.html | 62 +- doc/html/Tutor/Graphics/RefObject.gif | Bin 296 -> 292 bytes doc/html/Tutor/Graphics/RefRegion.gif | Bin 292 -> 296 bytes doc/html/Tutor/answers.html | 434 +++++++++----- doc/html/Tutor/api.html | 55 +- doc/html/Tutor/compound.html | 91 +-- doc/html/Tutor/crtatt.html | 318 ++++++---- doc/html/Tutor/crtdat.html | 435 +++++++++----- doc/html/Tutor/crtfile.html | 259 +++++--- doc/html/Tutor/crtgrp.html | 129 ++-- doc/html/Tutor/crtgrpar.html | 163 +++-- doc/html/Tutor/crtgrpd.html | 196 +++--- doc/html/Tutor/examples/attrexample.f90 | 87 +++ doc/html/Tutor/examples/chunk.f90 | 310 ++++++++++ doc/html/Tutor/examples/dsetexample.f90 | 70 +++ doc/html/Tutor/examples/fileexample.f90 | 27 + doc/html/Tutor/examples/groupexample.f90 | 42 ++ doc/html/Tutor/examples/grpdsetexample.f90 | 136 +++++ doc/html/Tutor/examples/grpit.f90 | 189 ++++++ doc/html/Tutor/examples/grpsexample.f90 | 58 ++ doc/html/Tutor/examples/hyperslab.f90 | 199 +++++++ doc/html/Tutor/examples/java/Compound.java | 540 +++++++++++++++++ doc/html/Tutor/examples/java/Copy.java | 541 +++++++++++++++++ doc/html/Tutor/examples/java/CreateAttribute.java | 302 ++++++++++ doc/html/Tutor/examples/java/CreateDataset.java | 210 +++++++ doc/html/Tutor/examples/java/CreateFile.java | 83 +++ doc/html/Tutor/examples/java/CreateFileInput.java | 118 ++++ doc/html/Tutor/examples/java/CreateGroup.java | 139 +++++ doc/html/Tutor/examples/java/CreateGroupAR.java | 152 +++++ .../Tutor/examples/java/CreateGroupDataset.java | 340 +++++++++++ doc/html/Tutor/examples/java/DatasetRdWt.java | 213 +++++++ doc/html/Tutor/examples/java/HyperSlab.java | 590 ++++++++++++++++++ doc/html/Tutor/examples/java/Makefile | 92 +++ doc/html/Tutor/examples/java/Makefile.in | 91 +++ doc/html/Tutor/examples/java/README | 21 + doc/html/Tutor/examples/java/readme.html | 192 ++++++ doc/html/Tutor/examples/java/runCompound.sh | 17 + doc/html/Tutor/examples/java/runCompound.sh.in | 17 + doc/html/Tutor/examples/java/runCopy.sh | 17 + doc/html/Tutor/examples/java/runCopy.sh.in | 17 + doc/html/Tutor/examples/java/runCreateAttribute.sh | 17 + .../Tutor/examples/java/runCreateAttribute.sh.in | 17 + doc/html/Tutor/examples/java/runCreateDataset.sh | 17 + .../Tutor/examples/java/runCreateDataset.sh.in | 17 + doc/html/Tutor/examples/java/runCreateFile.sh | 17 + doc/html/Tutor/examples/java/runCreateFile.sh.in | 17 + doc/html/Tutor/examples/java/runCreateFileInput.sh | 17 + .../Tutor/examples/java/runCreateFileInput.sh.in | 17 + doc/html/Tutor/examples/java/runCreateGroup.sh | 17 + doc/html/Tutor/examples/java/runCreateGroup.sh.in | 17 + doc/html/Tutor/examples/java/runCreateGroupAR.sh | 17 + .../Tutor/examples/java/runCreateGroupAR.sh.in | 17 + .../Tutor/examples/java/runCreateGroupDataset.sh | 17 + .../examples/java/runCreateGroupDataset.sh.in | 17 + doc/html/Tutor/examples/java/runDatasetRdWt.sh | 17 + doc/html/Tutor/examples/java/runDatasetRdWt.sh.in | 17 + doc/html/Tutor/examples/java/runHyperSlab.sh | 17 + doc/html/Tutor/examples/java/runHyperSlab.sh.in | 17 + doc/html/Tutor/examples/mount.f90 | 183 ++++++ doc/html/Tutor/examples/refobjexample.f90 | 136 +++++ doc/html/Tutor/examples/refregexample.f90 | 159 +++++ 
doc/html/Tutor/examples/rwdsetexample.f90 | 78 +++ doc/html/Tutor/examples/selectele.f90 | 282 +++++++++ doc/html/Tutor/extend.html | 372 ++++++------ doc/html/Tutor/fileorg.html | 25 +- doc/html/Tutor/glossary.html | 35 +- doc/html/Tutor/img001.gif | Bin 635 -> 597 bytes doc/html/Tutor/img002.gif | Bin 954 -> 1125 bytes doc/html/Tutor/img003.gif | Bin 928 -> 981 bytes doc/html/Tutor/img004.gif | Bin 1644 -> 2513 bytes doc/html/Tutor/img005.gif | Bin 1812 -> 3786 bytes doc/html/Tutor/intro.html | 37 +- doc/html/Tutor/iterate.html | 325 +++++----- doc/html/Tutor/mount.html | 343 +++++------ doc/html/Tutor/questions.html | 139 +++-- doc/html/Tutor/rdwt.html | 384 ++++++++---- doc/html/Tutor/references.html | 5 +- doc/html/Tutor/reftoobj.html | 568 +++++++----------- doc/html/Tutor/reftoreg.html | 657 ++++++++------------- doc/html/Tutor/select.html | 317 +++------- doc/html/Tutor/selectc.html | 379 ++++++------ doc/html/Tutor/title.html | 56 +- 86 files changed, 8900 insertions(+), 3043 deletions(-) create mode 100644 doc/html/Tutor/examples/attrexample.f90 create mode 100644 doc/html/Tutor/examples/chunk.f90 create mode 100644 doc/html/Tutor/examples/dsetexample.f90 create mode 100644 doc/html/Tutor/examples/fileexample.f90 create mode 100644 doc/html/Tutor/examples/groupexample.f90 create mode 100644 doc/html/Tutor/examples/grpdsetexample.f90 create mode 100644 doc/html/Tutor/examples/grpit.f90 create mode 100644 doc/html/Tutor/examples/grpsexample.f90 create mode 100644 doc/html/Tutor/examples/hyperslab.f90 create mode 100644 doc/html/Tutor/examples/java/Compound.java create mode 100644 doc/html/Tutor/examples/java/Copy.java create mode 100644 doc/html/Tutor/examples/java/CreateAttribute.java create mode 100644 doc/html/Tutor/examples/java/CreateDataset.java create mode 100644 doc/html/Tutor/examples/java/CreateFile.java create mode 100644 doc/html/Tutor/examples/java/CreateFileInput.java create mode 100644 doc/html/Tutor/examples/java/CreateGroup.java create mode 100644 doc/html/Tutor/examples/java/CreateGroupAR.java create mode 100644 doc/html/Tutor/examples/java/CreateGroupDataset.java create mode 100644 doc/html/Tutor/examples/java/DatasetRdWt.java create mode 100644 doc/html/Tutor/examples/java/HyperSlab.java create mode 100644 doc/html/Tutor/examples/java/Makefile create mode 100644 doc/html/Tutor/examples/java/Makefile.in create mode 100644 doc/html/Tutor/examples/java/README create mode 100644 doc/html/Tutor/examples/java/readme.html create mode 100644 doc/html/Tutor/examples/java/runCompound.sh create mode 100644 doc/html/Tutor/examples/java/runCompound.sh.in create mode 100644 doc/html/Tutor/examples/java/runCopy.sh create mode 100644 doc/html/Tutor/examples/java/runCopy.sh.in create mode 100644 doc/html/Tutor/examples/java/runCreateAttribute.sh create mode 100644 doc/html/Tutor/examples/java/runCreateAttribute.sh.in create mode 100644 doc/html/Tutor/examples/java/runCreateDataset.sh create mode 100644 doc/html/Tutor/examples/java/runCreateDataset.sh.in create mode 100644 doc/html/Tutor/examples/java/runCreateFile.sh create mode 100644 doc/html/Tutor/examples/java/runCreateFile.sh.in create mode 100644 doc/html/Tutor/examples/java/runCreateFileInput.sh create mode 100644 doc/html/Tutor/examples/java/runCreateFileInput.sh.in create mode 100644 doc/html/Tutor/examples/java/runCreateGroup.sh create mode 100644 doc/html/Tutor/examples/java/runCreateGroup.sh.in create mode 100644 doc/html/Tutor/examples/java/runCreateGroupAR.sh create mode 100644 
doc/html/Tutor/examples/java/runCreateGroupAR.sh.in create mode 100644 doc/html/Tutor/examples/java/runCreateGroupDataset.sh create mode 100644 doc/html/Tutor/examples/java/runCreateGroupDataset.sh.in create mode 100644 doc/html/Tutor/examples/java/runDatasetRdWt.sh create mode 100644 doc/html/Tutor/examples/java/runDatasetRdWt.sh.in create mode 100644 doc/html/Tutor/examples/java/runHyperSlab.sh create mode 100644 doc/html/Tutor/examples/java/runHyperSlab.sh.in create mode 100644 doc/html/Tutor/examples/mount.f90 create mode 100644 doc/html/Tutor/examples/refobjexample.f90 create mode 100644 doc/html/Tutor/examples/refregexample.f90 create mode 100644 doc/html/Tutor/examples/rwdsetexample.f90 create mode 100644 doc/html/Tutor/examples/selectele.f90 diff --git a/doc/html/Tutor/Contents.html b/doc/html/Tutor/Contents.html index 49c4a32..4500dd3 100644 --- a/doc/html/Tutor/Contents.html +++ b/doc/html/Tutor/Contents.html @@ -2,13 +2,6 @@ - -
-Return to HDF5 Doc Set -

@@ -56,11 +49,11 @@ Remove (or comment out) when served from HDF web server. Advanced Topics -
-Return to HDF5 Doc Set -
-
+Introductory Topics
-Advanced Topics - +Advanced Topics
+ + Additional Information -
HDF5 Utilities -- h5ls and h5dump -
Glossary -
References -
Example Programs +
-
Full TOC -

-
Copyright, Etc.
diff --git a/doc/html/Tutor/ContentsAdv.html b/doc/html/Tutor/ContentsAdv.html index b2ac014..1d6b5b4 100644 --- a/doc/html/Tutor/ContentsAdv.html +++ b/doc/html/Tutor/ContentsAdv.html @@ -3,14 +3,6 @@ - -
-Return to HDF5 Doc Set -
-
(Short TOC) -

Tutorial Title Page -
-
-Introductory Topics - + +Introductory Topics
+ + Advanced Topics -
-Compound Data Types -
-Selections Using H5Sselect_hyperslab -
-Selections Using H5Sselect_elements and H5Scopy -
+Compound Datatypes +Dataspace Selection - hyperslab +Dataspace Selection - Individual Points References to Objects -
References to Dataset Regions -
Chunking and Extendible Datasets -
Mounting Files -
Group Iteration +
-
Additional Information - +
-
Full TOC -
+
-
-
Copyright, Etc.
diff --git a/doc/html/Tutor/ContentsFull.html b/doc/html/Tutor/ContentsFull.html index 09cd5b9..72995aa 100644 --- a/doc/html/Tutor/ContentsFull.html +++ b/doc/html/Tutor/ContentsFull.html @@ -3,14 +3,6 @@ - -
-Return to HDF5 Doc Set -
-
-
-Return to HDF5 Doc Set -
-
+
-
Additional Information - +
-
Full TOC -
+
-
-
Copyright, Etc.
diff --git a/doc/html/Tutor/Graphics/RefObject.gif b/doc/html/Tutor/Graphics/RefObject.gif index 250039f..ae9dc05 100755 Binary files a/doc/html/Tutor/Graphics/RefObject.gif and b/doc/html/Tutor/Graphics/RefObject.gif differ diff --git a/doc/html/Tutor/Graphics/RefRegion.gif b/doc/html/Tutor/Graphics/RefRegion.gif index ae9dc05..250039f 100755 Binary files a/doc/html/Tutor/Graphics/RefRegion.gif and b/doc/html/Tutor/Graphics/RefRegion.gif differ diff --git a/doc/html/Tutor/answers.html b/doc/html/Tutor/answers.html index 6bc239b..45b6c82 100644 --- a/doc/html/Tutor/answers.html +++ b/doc/html/Tutor/answers.html @@ -1,5 +1,5 @@ -HDF5 Tutorial - Introductory Topics Questions with Answers +<TITLE>HDF5 Tutorial - Introductory Topics Quiz with Answers @@ -13,182 +13,290 @@ width=78 height=27 alt="NCSA">

[ HDF5 Tutorial Top ]

-Introductory Topics Questions with -Answers +Introductory Topics Quiz + with Answers


-
 
 
-Section 2: HDF File Organization
-================================
-
-1. Name and describe the two primary objects that can be stored in an HDF5
-   file:
-
-Answer: 
-Group: A grouping structure containing zero or more HDF5 objects, together
-       with supporting metadata.
-
-Dataset: A multidimensional array of data elements, together with
-         supporting metadata.
-
-2. What is an attribute?
-
-Answer: An HDF attribute is a user-defined HDF5 structure that provides extra
-        information about an HDF5 object.
-
-3. Give the path name for an object called "harry" that is a member of a
-   group called "dick," which in turn is a member of the root group.
-
-Answer: /dick/harry
-
-Section 3: The HDF5 API
-=======================
-
-Describe the purpose of each of the following HDF5 APIs:
-
-H5A, H5D, H5E, F5F, H5G, H5T, H5Z
-
-H5A: Attribute access and manipulation routines.
-H5D: Dataset access and manipulation routines.
-H5E: Error handling routines.
-F5F: File access routines.
-H5G: Routines for creating and operating on groups.
-H5T: Routines for creating and manipulating the datatypes of dataset elements.
-H5Z: Data compression routines.
-
-
-Section 4: Creating an HDF File
-===============================
-
-1. What two HDF5 routines must be called in order to create an HDF5 file?
-
-Answer: H5Fcreate and H5Fclose.
-
-2. What include file must be included in any file that uses the HDF5 library.
-
-Answer: hdf5.h must be included because it contains definitions and
-        declarations used by the library.
-
-3. An HDF5 file is never completely empty because as soon as an HDF5 file
-   is created, it automatically contains a certain primary object.  What is
-   that object?
-
-Answer: The root group.
-
-
-Section 5: Creating a Dataset
-=============================
-
-1. Name and describe two major datatype categories.
-
-Answer: atomic datatype - An atomic datatype cannot be decomposed into
-                          smaller units at the API level.
-        compound datatype - A compound datatype is a collection of atomic/  
-                            compound datatypes, or small arrays of such types.
-
-2. List the HDF5 atomic datatypes. Give an example of a predefined datatype.
-
-Answer: There are six HDF5 atomic datatypes: integer, floating point,
-        date and time, character string, bit field, opaque.
-        H5T_IEEE_F32LE - 4-byte little-endian, IEEE floating point,
-        H5T_NATIVE_INT - native integer  
-
-3. What does the dataspace describe? What are the major characteristics of the
-   simple dataspace? 
-
-Answer: The dataspace describes the dimensionality of the dataset. It is 
-        characterized by its rank and dimension sizes.  
+

Section 2: HDF File Organization

+
    + +
  1. Name and describe the two primary objects that can be stored in an HDF5 + file. + +
    +
    Answers: +
    Group: A grouping structure containing zero or more + HDF5 objects, together with supporting metadata. +
    Dataset: A multidimensional array of data elements, + together with supporting metadata. +
    + +

    +

  2. What is an attribute? + +
    +
    Answer: +
    An HDF5 attribute is a user-defined HDF5 structure that provides extra + information about an HDF5 object. +
    + +

    +

  3. Give the path name for an object called harry that is + a member of a group called dick, which, in turn, is a + member of the root group. + +
    +
    Answer: +
    /dick/harry +
    + + +
+

Section 3: The HDF5 API

+
    + +
  1. Describe the purpose of each of the following HDF5 APIs: + + + H5A, H5D, H5E, H5F, H5G, H5T, H5Z + + +
    +
    Answers: + + H5A: Attribute access and manipulation routines
    + H5D: Dataset access and manipulation routines
    + H5E: Error handling routines
    + H5F: File access routines
    + H5G: Routines for creating and operating on groups
    + H5T: Routines for creating and manipulating the + datatypes of dataset elements
    + H5Z: Data compression routines +
    +
    + + +
+

Section 4: Creating an HDF5 File

+
    + +
  1. What two HDF5 routines must be called to create an HDF5 file? + +
    +
    Answer: +
    H5Fcreate and H5Fclose. +
    + +

    +

  2. What include file must be included in any file that uses the HDF5 library? + +
    +
    Answer: +
    hdf5.h must be included because it contains definitions + and declarations used by the library. +
    + +

    +

  3. An HDF5 file is never completely empty because as soon as it is created, + it automatically contains a certain primary object. What is that object? + +
    +
    Answer: +
    The root group. +
    + + +
+

Section 5: Creating a Dataset

+
    + +
  1. Name and describe two major datatype categories. + +
    +
    Answers: +
    Atomic datatype: + An atomic datatype cannot be decomposed into smaller units at the + API level. +
    + Compound datatype: + A compound datatype is a collection of atomic and compound datatypes, + or small arrays of such types. +
    + +

    +

  2. List the HDF5 atomic datatypes. Give an example of a predefined datatype. + +
    +
    Answers: +
    There are six HDF5 atomic datatypes: integer, floating point, + date and time, character string, bit field, and opaque. + Examples of predefined datatypes include the following: + + H5T_IEEE_F32LE + - 4-byte little-endian, IEEE floating point
    + H5T_NATIVE_INT + - native integer +
    +
    + +

    +

  3. What does the dataspace describe? What are the major characteristics of + the simple dataspace? + +
    +
    Answers: +
    The dataspace describes the dimensionality of the dataset. + A simple dataspace is characterized by its rank and dimension sizes. +
    -4. What information needs to be passed to the H5Dcreate function, i.e. +

    +

  4. What information needs to be passed to the H5Dcreate function, i.e., what information is needed to describe a dataset at creation time? -Answer: dataset location, name, dataspace, datatype, and creation properties. +
    +
    Answer: +
    The dataset location, name, dataspace, datatype, and dataset + creation property list. +
    -Section 6: Reading from/Writing to a Dataset -============================================ +
+

Section 6: Reading from and Writing to a Dataset

+
    -1. What are six pieces of information which need to be specified for +
  1. What are six pieces of information which need to be specified for reading and writing a dataset? -Answer: A dataset, a dataset's datatype and dataspace in memory, the - dataspace in the file, the transfer properties and data buffer. - -2. Why are both the memory dataspace and file dataspace needed for - read/write operations, but only the memory datatype is specified for the - datatype? - -Answer: A dataset's file datatype is specified at creation time and cannot be - changed. Both file and memory dataspaces are needed for performing - subsetting and partial I/O operations. - -3. What does the line DATASPACE { SIMPLE (4 , 6 ) / ( 4 , 6 ) } in Fig 6.1 - means? - -Answer: It means that the dataset "dset" has a simple dataspace with the - current dimensions (4,6) and the maximum size of the dimensions (4,6). - - -Section 7: Creating an Attribute -================================ - -1. What is an attribute? - -Answer: An attribute is a dataset attached to an object. It describes the - nature and/or the intended usage of the object. - -2. Can partial I/O operations be performed on attributes? - -Answer: No - - -Section 8: Creating a Group -=========================== - -What are the two primary objects that can be included in -a group? - -Answer: A group and a dataset - - -Section 9: Creating Groups using Absolute/Relative Names -======================================================== - -1. Group names can be specified in two "ways". What are these - two types of group names that you can specify? - -Answer: relative and absolute - -2. You have a dataset named "moo" in the group "boo", which is - in the group "foo", which in turn, is in the root group. How would - you specify an absolute name to access this dataset? - -Answer: /foo/boo/moo - -Section 10: Creating Datasets in Groups -======================================= - -Describe a way to access the dataset "moo" described in the previous section -(Section 9, question 2), using a relative and absolute pathname. - -Answers: 1. Access the group, "/foo", and get the group ID. - Access the group "boo" using the group ID obtained in Step 1. - Access the dataset "moo" using the group ID in Step 2. - gid = H5Gopen (file_id, "/foo", 0); /* absolute path */ - gid1 = H5Gopen (gid, "boo", 0); /* relative path */ - did = H5Dopen (gid1, "moo"); /* relative path */ +
    +
    Answer: +
    The dataset identifier, the dataset's datatype and dataspace in + memory, the dataspace in the file, the dataset transfer property + list, and a data buffer. +
    + +

    +

  2. Why are both the memory dataspace and file dataspace needed for + read/write operations, while only the memory datatype is required? + +
    +
    Answer: +
    A dataset's file datatype is not required for a read/write operation + because the file datatype is specified when the dataset is created + and cannot be changed. Both file and memory dataspaces are required + for dataset subsetting and for performing partial I/O operations. +
    + +

    +

  3. What does the line +
        + DATASPACE { SIMPLE (4 , 6 ) / ( 4 , 6 ) } +
    in Figure 6.1 mean? + +
    +
    Answer: +
    It means that the dataset dset has a simple dataspace + with the current dimensions (4,6) and the maximum size of the + dimensions (4,6). +
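A minimal call sketch tying together the six pieces of information listed in the answer to question 1. This is only an illustration; the identifiers dset_id, memspace_id, and filespace_id and the buffer buf are hypothetical placeholders assumed to have been created earlier in a program.
C:
    /* dataset identifier, memory datatype, memory dataspace, file      */
    /* dataspace, transfer property list (default here), data buffer    */
    status = H5Dwrite (dset_id, H5T_NATIVE_INT, memspace_id, filespace_id,
                       H5P_DEFAULT, buf);
    status = H5Dread  (dset_id, H5T_NATIVE_INT, memspace_id, filespace_id,
                       H5P_DEFAULT, buf);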
    + + +
+

Section 7: Creating an Attribute

+
    + +
  1. What is an attribute? + +
    +
    Answer: +
    An attribute is a dataset attached to an object. It describes the + nature and/or the intended usage of the object. +
    + +

    +

  2. Can partial I/O operations be performed on attributes? + +
    +
    Answer: +
    No. +
    + + +
+

Section 8: Creating a Group

+
    + +
  1. What are the two primary objects that can be included in a group? + +
    +
    Answer: +
    A group and a dataset. +
    + + +
+

Section 9: Creating Groups Using Absolute and Relative Names

+
    + +
  1. Group names can be specified in two ways. What are these two types + of group names? + +
    +
    Answer: +
    Relative and absolute. +
    + +

    +

  2. You have a dataset named moo in the group + boo, which is in the group foo, + which, in turn, is in the root group. + How would you specify an absolute name to access this dataset? + +
    +
    Answer: +
    /foo/boo/moo +
    + + +
+

Section 10: Creating Datasets in Groups

+
    + +
  1. Describe a way to access the dataset moo described in + the previous section (Section 9, question 2) using a + relative name. + Describe a way to access the same dataset using an absolute name. + +
    +
    Answers: +
      +
    1. Access the group /foo and get the group ID. + Access the group boo using the group ID obtained + in Step 1. + Access the dataset moo using the group ID obtained + in Step 2. +
      +gid = H5Gopen (file_id, "/foo", 0);       /* absolute path */
      +gid1 = H5Gopen (gid, "boo", 0);           /* relative path */
      +did = H5Dopen (gid1, "moo");              /* relative path */  
      - 2. Access the group, "/foo", and get the group ID. - Access the dataset "boo/moo", with the group ID just obtained. - gid = H5Gopen (file_id, "/foo", 0); /* absolute path */ - did = H5Dopen (gid, "boo/moo"); /* relative path */ +
    2. Access the group /foo and get the group ID. + Access the dataset boo/moo with the group ID + just obtained. +
      +gid = H5Gopen (file_id, "/foo", 0);       /* absolute path */
      +did = H5Dopen (gid, "boo/moo");           /* relative path */  
      - 3. Access the dataset with an absolute path. - did = H5Dopen (file_id, "/foo/boo/moo"); /* absolute path */ -
+
  • Access the dataset with an absolute path. +
    +did = H5Dopen (file_id, "/foo/boo/moo");  /* absolute path */  
    + + + + +


    @@ -203,7 +311,7 @@ Answers: 1. Access the group, "/foo", and get the group ID. hdfhelp@ncsa.uiuc.edu -
    Last Modified: August 2, 1999

    +
    Last Modified: January 13, 2000


    diff --git a/doc/html/Tutor/api.html b/doc/html/Tutor/api.html index 3ed0a84..4921716 100644 --- a/doc/html/Tutor/api.html +++ b/doc/html/Tutor/api.html @@ -21,11 +21,21 @@ width=78 height=27 alt="NCSA">

    -The HDF5 library provides several interfaces, and is currently implemented in -C. The APIs provide routines for accessing HDF5 files and creating and -manipulating HDF5 objects. All C routines in the HDF5 library begin with -a prefix of the form H5*, where * is a single letter indicating the object on -which the operation is to be performed. The APIs are listed below: +The HDF5 library provides several interfaces, or APIs. +These APIs provide routines for creating, accessing, and manipulating +HDF5 files and objects. +

    +The library itself is implemented in C. To facilitate the work of +FORTRAN90 and Java programmers, HDF5 function wrappers have been developed +in each of these languages. +At the time of this writing, a set of C++ wrappers is in development. +This tutorial discusses the use of the C functions and the FORTRAN wrappers. +

    +All C routines in the HDF5 library begin with a prefix of the form H5*, +where * is one or two uppercase letters indicating the type of object on which the +function operates. +The FORTRAN wrappers come in the form of subroutines that begin with +h5 and end with _f. The APIs are listed below:

    @@ -40,76 +50,78 @@ which the operation is to be performed. The APIs are listed below: - + - + - + - + - + - + - + - + - + - + - + - +
    H5
    Library Functions: the general-purpose H5 functions.Library Functions: general-purpose H5 functions
    H5A
    Annotation Interface: attribute access and manipulating routines.Annotation Interface: attribute access and manipulation + routines
    H5D
    Dataset Interface: dataset access and manipulating routines. - Dataset Interface: dataset access and manipulation + routines
    H5E
    Error Interface: error handling routines.Error Interface: error handling routines
    H5F
    File Interface: file access routines.File Interface: file access routines
    H5G
    Group Interface: group creating and operating routines.Group Interface: group creation and operation routines
    H5I
    Identifier Interface: identifier routines.Identifier Interface: identifier routines
    H5P
    Property List Interface: object property list manipulating - routines.Property List Interface: object property list manipulation + routines
    H5R
    Reference Interface: reference routines.Reference Interface: reference routines
    H5S
    Dataspace Interface: routines for defining dataspaces.Dataspace Interface: dataspace definition and access + routines
    H5T
    Data type Interface: routines for creating and manipulating - the data type of dataset elements.Datatype Interface: datatype creation and manipulation + routines
    H5Z
    Compression Interface: compression routine(s).Compression Interface: compression routine(s)
    @@ -127,8 +139,9 @@ which the operation is to be performed. The APIs are listed below: hdfhelp@ncsa.uiuc.edu -

    Last Modified: July 30, 1999

    +
    Last Modified: December 10, 1999

    +
    diff --git a/doc/html/Tutor/compound.html b/doc/html/Tutor/compound.html index 7983bf4..9471e8a 100644 --- a/doc/html/Tutor/compound.html +++ b/doc/html/Tutor/compound.html @@ -1,5 +1,5 @@ -HDF5 Tutorial - Compound Data Types +<TITLE>HDF5 Tutorial - Compound Datatypes @@ -13,7 +13,7 @@ width=78 height=27 alt="NCSA">

    [ HDF5 Tutorial Top ]

    -Compound Data Types +Compound Datatypes


    @@ -21,7 +21,7 @@ width=78 height=27 alt="NCSA">

    Contents:

    +Note: The FORTRAN API does not yet support compound datatypes.
    -

    Creating Compound Data Types

    -A compound data type is similar to a struct in C or a common block in -Fortran. It is a collection of one or more atomic types or small arrays of -such types. To create and use a compound data type you need to refer to -various properties of the data compound data type: +

    Creating Compound Datatypes

    +A compound datatype is similar to a struct in C or a common block in +FORTRAN. It is a collection of one or more atomic types or small arrays of +such types. To create and use a compound datatype you need to be familiar +with various properties of the compound datatype:
      -
    • It is of class compound. +
    • It is of class compound.
    • It has a fixed total size, in bytes.
    • It consists of zero or more members (defined in any order) with - unique names and which occupy non-overlapping regions within the datum. -
    • Each member has its own data type. -
    • Each member is referenced by an index number between zero and N-1, - where N is the number of members in the compound data type. + unique names and occupying non-overlapping regions within the datum. +
    • Each member has its own datatype. +
    • Each member is referenced by an index number between zero and N-1, + where N is the number of members in the compound datatype.
    • Each member has a name which is unique among its siblings in a - compound data type. -
    • Each member has a fixed byte offset, which is the first byte - (smallest byte address) of that member in a compound data type. + compound datatype. +
    • Each member has a fixed byte offset, which locates the first byte + (smallest byte address) of that member in the compound datatype.
    • Each member can be a small array of up to four dimensions.
    -Properties of members of a compound data type are defined when the -member is added to the compound type and cannot be subsequently modified. +Properties of members of a compound datatype are defined when the +member is added to the compound datatype and cannot be subsequently modified.

    -Compound data types must be built out of other data types. First, one -creates an empty compound data type and specifies its total size. Then -members are added to the compound data type in any order. +Compound datatypes must be built out of other datatypes. First, one +creates an empty compound datatype and specifies its total size. Then +members are added to the compound datatype in any order.

    Programming Example

    Description

    -This example shows how to create a compound data type, write an array -to the file which uses the compound data type, and read back subsets of -the members.
    - -[
    Download h5_compound.c] +This example shows how to create a compound datatype, write an array +to the file which uses the compound datatype, and read back subsets of +the members. +

    +

     +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
     #include "hdf5.h"
    @@ -126,7 +131,7 @@ main(void)
         file = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
     
         /*
    -     * Create the memory data type. 
    +     * Create the memory datatype. 
          */
         s1_tid = H5Tcreate (H5T_COMPOUND, sizeof(s1_t));
         H5Tinsert(s1_tid, "a_name", HOFFSET(s1_t, a), H5T_NATIVE_INT);
    @@ -159,7 +164,7 @@ main(void)
         dataset = H5Dopen(file, DATASETNAME);
     
         /* 
    -     * Create a data type for s2
    +     * Create a datatype for s2
          */
         s2_tid = H5Tcreate(H5T_COMPOUND, sizeof(s2_t));
     
    @@ -186,7 +191,7 @@ main(void)
         printf("\n");
     
         /* 
    -     * Create a data type for s3.
    +     * Create a datatype for s3.
          */
         s3_tid = H5Tcreate(H5T_COMPOUND, sizeof(float));
     
    @@ -235,33 +240,34 @@ Field b :
     
     

    Remarks

      -
    • H5Tcreate creates a new data type of the specified class with +
    • H5Tcreate creates a new datatype of the specified class with the specified number of bytes.
           hid_t H5Tcreate ( H5T_class_t class, size_t size ) 
       
        -
      • The class parameter specifies the data type to create. -Currently only the H5T_COMPOUND data type class is supported with this +
      • The class parameter specifies the datatype to create. +Currently only the H5T_COMPOUND datatype class is supported with this function.
      • The size parameter specifies the number of bytes in the -data type to create. +datatype to create.

      -

    • H5Tinsert adds a member to the compound data type specified by +
    • H5Tinsert adds a member to the compound datatype specified by type_id.
      -   herr_t H5Tinsert ( hid_t type_id, const char * name, off_t offset, hid_t field_id ) 
      +   herr_t H5Tinsert ( hid_t type_id, const char * name, off_t offset, 
      +                      hid_t field_id ) 
       
        -
      • The type_id parameter is the identifier of the compound data type +
      • The type_id parameter is the identifier of the compound datatype to modify.
      • The name parameter is the name of the field to insert. The new -member name must be unique within a compound data type. +member name must be unique within a compound datatype.
      • The offset parameter is the offset in the memory structure of the field to insert. -The library defines the HOFFSET macro to compute the offset of a member within +The library defines the HOFFSET macro to compute the offset of a member within a struct:
           HOFFSET ( s, m ) 
        @@ -269,15 +275,15 @@ a struct:
         This macro computes the offset of member m within a struct 
         variable s. 
         
        -
      • The field_id parameter is the data type identifier of the +
      • The field_id parameter is the datatype identifier of the field to insert.

      -

    • H5Tclose releases a data type. +
    • H5Tclose releases a datatype.
          herr_t H5Tclose ( hid_t type_id ) 
       
      -The type_id parameter is the identifier of the data type to release. +The type_id parameter is the identifier of the datatype to release.
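A condensed sketch of these three calls, assuming a hypothetical two-member struct (the full listing above uses a larger struct); it shows only how H5Tcreate, H5Tinsert with HOFFSET, and H5Tclose fit together.
C:
    typedef struct s1_t {
        int   a;
        float b;
    } s1_t;
    hid_t s1_tid;

    /* Create an empty compound datatype sized to the struct, then add  */
    /* each member at the offset computed by the HOFFSET macro.         */
    s1_tid = H5Tcreate (H5T_COMPOUND, sizeof(s1_t));
    H5Tinsert (s1_tid, "a_name", HOFFSET(s1_t, a), H5T_NATIVE_INT);
    H5Tinsert (s1_tid, "b_name", HOFFSET(s1_t, b), H5T_NATIVE_FLOAT);

    /* Release the datatype when it is no longer needed. */
    H5Tclose (s1_tid);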

    File Contents

    @@ -365,8 +371,9 @@ GROUP "/" {
    hdfhelp@ncsa.uiuc.edu -
    Last Modified: August 27, 1999

    +
    Last Modified: April 5, 2000

    +
    diff --git a/doc/html/Tutor/crtatt.html b/doc/html/Tutor/crtatt.html index 82de873..6b7f89a 100644 --- a/doc/html/Tutor/crtatt.html +++ b/doc/html/Tutor/crtatt.html @@ -36,160 +36,212 @@ width=78 height=27 alt="NCSA">

    Attributes are small datasets that can be used to describe the nature and/or the intended usage of the object they are attached to. In this section, we -show how to create and read/write an attribute. +show how to create, read, and write an attribute.

    Creating an attribute

    - Creating an attribute is similar to the creation of a dataset. To create an - attribute the application must specify the object which the attribute is - attached to, the data type and space of the attribute data and the creation - properties. + Creating an attribute is similar to creating a dataset. To create an + attribute, the application must specify the object which the attribute is + attached to, the datatype and dataspace of the attribute data, + and the attribute creation property list.

    The steps to create an attribute are as follows:

    1. Obtain the object identifier that the attribute is to be attached to. -
    2. Define the characteristics of the attribute and specify creation - properties. +
    3. Define the characteristics of the attribute and specify the + attribute creation property list.
        -
      • Define the data type. +
      • Define the datatype.
      • Define the dataspace. -
      • Specify the creation properties. +
      • Specify the attribute creation property list.
    4. Create the attribute. -
    5. Close the attribute and data type, dataspace, and creation property - list if necessary. +
    6. Close the attribute and datatype, dataspace, and + attribute creation property list, if necessary.

    - To create an attribute, the calling program must contain the following calls: + To create and close an attribute, the calling program must use +H5Acreate/h5acreate_f and +H5Aclose/h5aclose_f. For example: +

    +C:

    -     attr_id = H5Acreate(loc_id, attr_name, type_id, space_id, create_plist);
    -     H5Aclose(attr_id);
    +     attr_id = H5Acreate (dset_id, attr_name, type_id, space_id, creation_prp);
    +     status = H5Aclose (attr_id);
     
    +FORTRAN: +
    +     CALL h5acreate_f (dset_id, attr_nam, type_id, space_id, attr_id, &
    +                       hdferr, creation_prp=creat_plist_id)
    +          or
    +     CALL h5acreate_f (dset_id, attr_nam, type_id, space_id, attr_id, hdferr)
    +
    +     CALL h5aclose_f (attr_id, hdferr)
    +

    Reading/Writing an attribute

    - Attributes may only be read/written as an entire object. No partial I/O is - currently supported. Therefore, to perform I/O operations on an attribute, the + Attributes may only be read or written as an entire object; no partial I/O is + supported. Therefore, to perform I/O operations on an attribute, the application needs only to specify the attribute and the attribute's memory - data type. + datatype.

    - The steps to read/write an attribute are as follows. + The steps to read or write an attribute are as follows.

    1. Obtain the attribute identifier. -
    2. Specify the attribute's memory data type. +
    3. Specify the attribute's memory datatype.
    4. Perform the desired operation. -
    5. Close the memory data type if necessary. +
    6. Close the memory datatype if necessary.

    -To read/write an attribute, the calling program must contain the following - calls: +To read and/or write an attribute, the calling program must contain the +H5Aread/h5aread_f and/or +H5Awrite/h5awrite_f routines. For example: +

    +C:

    -    status = H5Aread(attr_id, mem_type_id, buf);
    +    status = H5Aread (attr_id, mem_type_id, buf);
    +    status = H5Awrite (attr_id, mem_type_id, buf);
     
    - or +FORTRAN:
    -    status = H5Awrite(attr_id, mem_type_id, buf);
    +    CALL h5awrite_f (attr_id, mem_type_id, buf, hdferr)  
    +    CALL h5aread_f (attr_id, mem_type_id, buf, hdferr)
     

    Programming Example

    Description

    This example shows how to create and write a dataset attribute. -It opens an existing file 'dset.h5', obtains the id of the dataset "/dset1", +It opens an existing file dset.h5 in C +(dsetf.h5 in FORTRAN), +obtains the identifier of the dataset /dset, defines the attribute's dataspace, creates the dataset attribute, writes the attribute, and then closes the attribute's dataspace, attribute, dataset, and file.
    -[
    Download h5_crtatt.c ] -
    -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    -
    -#include <hdf5.h>
    -#define FILE "dset.h5"
    -
    -main() {
    -
    -   hid_t       file_id, dataset_id, attribute_id, dataspace_id;  /* identifiers
    -*/
    -   hsize_t     dims;
    -   int         attr_data[2];
    -   herr_t      status;
    -
    -   /* Initialize the attribute data. */
    -   attr_data[0] = 100;
    -   attr_data[1] = 200;
    -
    -   /* Open an existing file. */
    -   file_id = H5Fopen(FILE, H5F_ACC_RDWR, H5P_DEFAULT);
    -
    -   /* Open an existing dataset. */
    -   dataset_id = H5Dopen(file_id, "/dset");
    -
    -   /* Create the data space for the attribute. */
    -   dims = 2;
    -   dataspace_id = H5Screate_simple(1, &dims, NULL);
    -
    -   /* Create a dataset attribute. */
    -   attribute_id = H5Acreate(dataset_id, "attr", H5T_STD_I32BE, dataspace_id, H5P_DEFAULT);
    -
    -   /* Write the attribute data. */
    -   status = H5Awrite(attribute_id, H5T_NATIVE_INT, attr_data);
    -
    -   /* Close the attribute. */
    -   status = H5Aclose(attribute_id);
    +
     
    -   /* Close the dataspace. */
    -   status = H5Sclose(dataspace_id);
    +NOTE: To download a tar file of the examples, including a Makefile,
    +please go to the References page.
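A condensed C sketch of the sequence just described, using the call forms shown earlier in this section; dataset_id is assumed to be the identifier of an already opened dataset, as in the listing above.
C:
    hid_t   attr_id, aspace_id;
    hsize_t dims = 2;
    int     attr_data[2] = {100, 200};
    herr_t  status;

    /* Define a 1-D dataspace for the attribute, create and write the   */
    /* attribute, then release the attribute and dataspace identifiers. */
    aspace_id = H5Screate_simple (1, &dims, NULL);
    attr_id   = H5Acreate (dataset_id, "attr", H5T_STD_I32BE, aspace_id,
                           H5P_DEFAULT);
    status    = H5Awrite (attr_id, H5T_NATIVE_INT, attr_data);
    status    = H5Aclose (attr_id);
    status    = H5Sclose (aspace_id);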
     
    -   /* Close to the dataset. */
    -   status = H5Dclose(dataset_id);
    -
    -   /* Close the file. */
    -   status = H5Fclose(file_id);
    -}
    -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    -

    Remarks

    The dimensions of a dataset can be fixed (unchanging), or they may be - unlimited, which means that they are extendible. A dataspace can also - describe portions of a dataset, making it possible to do partial I/O + unlimited, which means that they are extensible. A dataspace can also + describe a portion of a dataset, making it possible to do partial I/O operations on selections. -

    Dataset creation properties

    +

    Dataset Creation Property Lists

    - When creating a dataset, HDF5 allows users to specify how raw data is - organized on disk and how the raw data is compressed. This information is + When creating a dataset, HDF5 allows the user to specify how raw data is + organized and/or compressed on disk. This information is stored in a dataset creation property list and passed to the dataset interface. The raw data on disk can be stored contiguously (in the same - linear way that it is organized in memory), partitioned into chunks and - stored externally, etc. In this tutorial, we use the default creation - property list; that is, no compression and - contiguous storage layout is used. For more information about the creation - properties, see the HDF5 User's Guide. + linear way that it is organized in memory), partitioned into chunks, + stored externally, etc. In this tutorial, we use the + default dataset creation property list; that is, contiguous storage layout + and no compression are used. For more information about + dataset creation property lists, + see The Dataset Interface (H5D) + in the HDF5 User's Guide.

    -In HDF5, data types and spaces are independent objects, which are created -separately from any dataset that they might be attached to. Because of this the -creation of a dataset requires definitions of data type and dataspace. -In this tutorial, we use HDF5 predefined data types (integer) and consider +In HDF5, datatypes and dataspaces are independent objects which are created +separately from any dataset that they might be attached to. Because of this, +the creation of a dataset requires definition of the datatype and dataspace. +In this tutorial, we use HDF5 predefined datatypes (integer) and consider only simple dataspaces. Hence, only the creation of dataspace objects is needed.

    @@ -142,158 +208,250 @@ needed. To create an empty dataset (no data written) the following steps need to be taken:

      -
    1. Obtain the location id where the dataset is to be created. -
    2. Define the dataset characteristics and creation properties. +
    3. Obtain the location identifier where the dataset is to be created. +
    4. Define the dataset characteristics and the dataset creation property list.
        -
      • define a data type -
      • define a dataspace -
      • specify dataset creation properties +
      • Define a datatype. +
      • Define a dataspace. +
      • Specify the dataset creation property list.
    5. Create the dataset. -
    6. Close the data type, dataspace, and the property list if necessary. +
    7. Close the datatype, the dataspace, and the property list if necessary.
    8. Close the dataset.
    -To create a simple dataspace, the calling program must contain the following -calls: +To create a simple dataspace, the calling program must contain a +call to create and close the dataspace. For example: +

    +C: +

    +   space_id = H5Screate_simple (rank, dims, maxdims);
    +   status = H5Sclose (space_id );
    +
    +FORTRAN:
    -   dataspace_id = H5Screate_simple(rank, dims, maxdims);
    -   H5Sclose(dataspace_id );
    +   CALL h5screate_simple_f (rank, dims, space_id, hdferr, maxdims=max_dims)
    +        or
    +   CALL h5screate_simple_f (rank, dims, space_id, hdferr)
    +
    +   CALL h5sclose_f (space_id, hdferr)
     
    -To create a dataset, the calling program must contain the following calls: +To create a dataset, the calling program must contain calls to create +and close the dataset. For example: +

    +C:

    -   dataset_id = H5Dcreate(hid_t loc_id, const char *name, hid_t type_id,
    -                          hid_t space_id, hid_t create_plist_id);
    -   H5Dclose (dataset_id);
    +   dset_id = H5Dcreate (hid_t loc_id, const char *name, hid_t type_id,
    +                          hid_t space_id, hid_t creation_prp);
    +   status = H5Dclose (dset_id);
     
    +FORTRAN: +
    +   CALL h5dcreate_f (loc_id, name, type_id, space_id, dset_id, &
    +                     hdferr, creation_prp=creat_plist_id)
    +        or
    +   CALL h5dcreate_f (loc_id, name, type_id, space_id, dset_id, hdferr)
     
    +   CALL h5dclose_f (dset_id, hdferr)
    +
    +If using the pre-defined datatypes in FORTRAN, then a call must +be made to initialize and terminate access to the pre-defined datatypes: +
    +  CALL h5init_types_f (hdferr) 
    +  CALL h5close_types_f (hdferr)
    +
    +h5init_types_f must be called before any HDF5 library +subroutine calls are made; +h5close_types_f must be called after the final HDF5 library +subroutine call. +See the programming example below for an illustration of the use of +these calls.

    Programming Example

    Description

    The following example shows how to create an empty dataset. -It creates a file called 'dset.h5', defines the dataset dataspace, creates a +It creates a file called dset.h5 in the C version +(dsetf.h5 in Fortran), defines the dataset dataspace, creates a dataset which is a 4x6 integer array, and then closes the dataspace, the dataset, and the file.
    -[
    Download h5_crtdat.c ] -
    -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    -
    -#include <hdf5.h>
    -#define FILE "dset.h5"
    -
    -main() {
    -
    -   hid_t       file_id, dataset_id, dataspace_id;  /* identifiers */
    -   hsize_t     dims[2];
    -   herr_t      status;
    -
    -   /* Create a new file using default properties. */
    -   file_id = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    -
    -   /* Create the data 
    -                space for the dataset. */
    -   dims[0] = 4;
    -   dims[1] = 6;
    -   dataspace_id = H5Screate_simple(2, dims, NULL);
    -
    -   /* Create the dataset. */
    -   dataset_id = H5Dcreate(file_id, "/dset", H5T_STD_I32BE, dataspace_id, 
    -                H5P_DEFAULT);
    -
    -   /* End access to the dataset and release resources used by it. */
    -   status = H5Dclose(dataset_id);
    -
    -   /* Terminate access to the data space. */
    -   status = H5Sclose(dataspace_id);
    + 
     
    -   /* Close the file. */
    -   status = H5Fclose(file_id);
    -}
    -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    -
    +NOTE: To download a tar file of the examples, including a Makefile, +please go to the References page of this tutorial.
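A condensed C sketch of the steps just listed, using the call forms shown earlier in this section; file_id is assumed to be the identifier of an already created file, as in the listing above.
C:
    hid_t   dataspace_id, dataset_id;
    hsize_t dims[2] = {4, 6};
    herr_t  status;

    /* Define a 4x6 dataspace, create the empty dataset with the        */
    /* default creation property list, then release the identifiers.    */
    dataspace_id = H5Screate_simple (2, dims, NULL);
    dataset_id   = H5Dcreate (file_id, "/dset", H5T_STD_I32BE, dataspace_id,
                              H5P_DEFAULT);
    status       = H5Dclose (dataset_id);
    status       = H5Sclose (dataspace_id);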

    Remarks

      -
    • H5Screate_simple creates a new simple data space and returns a data space - identifier. +
    • H5Screate_simple/h5screate_simple_f +creates a new simple dataspace and returns a dataspace identifier.
      +C:
         hid_t H5Screate_simple (int rank, const hsize_t * dims, 
                                 const hsize_t * maxdims)
      +FORTRAN:
      +  h5screate_simple_f (rank, dims, space_id, hdferr, maxdims) 
      +
      +            rank        INTEGER
      +            dims(*)     INTEGER(HSIZE_T)
      +            space_id    INTEGER(HID_T)
      +            hdferr      INTEGER 
      +                        (Valid values: 0 on success and -1 on failure)
      +            maxdims(*)  INTEGER(HSIZE_T), OPTIONAL
       
        -
      • The first parameter specifies the rank of the dataset. - -
      • The second parameter specifies the size of the dataset. - -
      • The third parameter is for the upper limit on the size of the dataset. - If it is NULL, the upper limit is the same as the dimension - sizes specified by the second parameter. +
      • The rank parameter specifies the rank, i.e., the number of + dimensions, of the dataset. + +
      • The dims parameter specifies the size of the dataset. + +
      • The maxdims parameter specifies the upper limit on the + size of the dataset. + If this parameter is NULL in C (or not specified in FORTRAN), + then the upper limit is the same as the dimension + sizes specified by the dims parameter. +
• The function returns the dataspace identifier in C if successful; + otherwise it returns a negative value. + In FORTRAN, the dataspace identifier + is returned in the space_id parameter. If the call is successful + then a 0 is returned in hdferr; otherwise a -1 is returned.

      -

    • H5Dcreate creates a dataset at the specified location and returns a - dataset identifier. +
    • H5Dcreate/h5dcreate_f creates a dataset +at the specified location and returns a dataset identifier.
      +C:
         hid_t H5Dcreate (hid_t loc_id, const char *name, hid_t type_id, 
      -                   hid_t space_id, hid_t create_plist_id) 
      +                   hid_t space_id, hid_t creation_prp) 
      +FORTRAN:
      +  h5dcreate_f (loc_id, name, type_id, space_id, dset_id, & 
      +               hdferr, creation_prp) 
      +
      +            loc_id        INTEGER(HID_T)
      +            name          CHARACTER(LEN=*)
      +            type_id       INTEGER(HID_T)
      +            space_id      INTEGER(HID_T)
      +            dset_id       INTEGER(HID_T)
      +            hdferr        INTEGER 
      +                          (Valid values: 0 on success and -1 on failure)
      +            creation_prp  INTEGER(HID_T), OPTIONAL
       
        -
      • The first parameter is the location identifier. +
      • The loc_id parameter is the location identifier. +

        +

      • The name parameter is the name of the dataset to create. -
      • The second parameter is the name of the dataset to create. +

        +

      • The type_id parameter specifies the datatype identifier. -
      • The third parameter is the data type identifier. H5T_STD_I32BE, a - 32-bit Big Endian integer, is an HDF atomic data type. +

        +

      • The space_id parameter is the dataspace identifier. -
      • The fourth parameter is the data space identifier. +

        +

      • The creation_prp parameter specifies the + dataset creation property list. + H5P_DEFAULT in C and H5P_DEFAULT_F in FORTRAN + specify the default dataset creation property list. + This parameter is optional in FORTRAN; if it is omitted, + the default dataset creation property list will be used. +

        +

      • The C function returns the dataset identifier if successful and + a negative value otherwise. The FORTRAN call returns the + dataset identifier in dset_id. If it is successful, then 0 is + returned in hdferr; otherwise a -1 is returned. -
      • The last parameter specifies the dataset creation property list. - H5P_DEFAULT specifies the default dataset creation property list.

      -

    • H5Dcreate creates an empty array and initializes the data to 0. +
    • H5Dcreate/h5dcreate_f creates an empty array +and initializes the data to 0.

      -

    • When a dataset is no longer accessed by a program, H5Dclose must be -called to release the resource used by the dataset. This call is mandatory. +
    • When a dataset is no longer accessed by a program, +H5Dclose/h5dclose_f must be called to release +the resource used by the dataset. This call is mandatory.
      -  hid_t H5Dclose (hid_t dataset_id)
      +C:
+    herr_t H5Dclose (hid_t dset_id)
      +FORTRAN:
      +    h5dclose_f (dset_id, hdferr)
      +
      +            dset_id  INTEGER(HID_T)
      +            hdferr   INTEGER 
      +                     (Valid values: 0 on success and -1 on failure)
       

    File Contents

    -The file contents of 'dset.h5' are shown is Figure 5.4 and Figure 5.5. - +The contents of the file dset.h5 (dsetf.h5 +for FORTRAN) are shown in Figure 5.4 and Figures 5.5a +and 5.5b. +

    +

    +
    +Figure 5.4   Contents of dset.h5 ( dsetf.h5) +
    + +
    + + - - + + - - - +
    Figure 5.4   The Contents of 'dset.h5' - Figure 5.5   'dset.h5' in DDL Figure 5.5a   dset.h5 in DDL Figure 5.5b   dsetf.h5 in DDL
    -
          HDF5 "dset.h5" {
    -      GROUP "/" {
    -         DATASET "dset" {
    -            DATATYPE { H5T_STD_I32BE }
    -            DATASPACE { SIMPLE ( 4, 6 ) / ( 4, 6 ) }
    -            DATA {
    -               0, 0, 0, 0, 0, 0,
    -               0, 0, 0, 0, 0, 0,
    -               0, 0, 0, 0, 0, 0,
    -               0, 0, 0, 0, 0, 0
    -            }
    -         }
    +    
    +
    +HDF5 "dset.h5" {
    +GROUP "/" {
    +   DATASET "dset" {
    +      DATATYPE { H5T_STD_I32BE }
    +      DATASPACE { SIMPLE ( 4, 6 ) / ( 4, 6 ) }
    +      DATA {
    +         0, 0, 0, 0, 0, 0,
    +         0, 0, 0, 0, 0, 0,
    +         0, 0, 0, 0, 0, 0,
    +         0, 0, 0, 0, 0, 0
           }
    +   }
    +}
    +}
    +
    +
    +
          
    +HDF5 "dsetf.h5" {
    +GROUP "/" {
    +   DATASET "dset" {
    +      DATATYPE { H5T_STD_I32BE }
    +      DATASPACE { SIMPLE ( 6, 4 ) / ( 6, 4 ) }
    +      DATA {
    +         0, 0, 0, 0,
    +         0, 0, 0, 0,
    +         0, 0, 0, 0,
    +         0, 0, 0, 0,
    +         0, 0, 0, 0,
    +         0, 0, 0, 0
           }
    +   }
    +}
    +}
     
    +

    +Note in Figures 5.5a and 5.5b that +H5T_STD_I32BE, a 32-bit Big Endian integer, +is an HDF atomic datatype. @@ -302,12 +460,12 @@ The following is the simplified DDL dataset definition:

    Fig. 5.6   HDF5 Dataset Definition

    -      <dataset> ::= DATASET "<dataset_name>" { <data type>
    +      <dataset> ::= DATASET "<dataset_name>" { <datatype>
                                                    <dataspace>
                                                    <data>
                                                    <dataset_attribute>* }
     
    -      <data type> ::= DATATYPE { <atomic_type> }
    +      <datatype> ::= DATATYPE { <atomic_type> }
     
           <dataspace> ::= DATASPACE { SIMPLE <current_dims> / <max_dims> }
     
    @@ -329,8 +487,9 @@ The following is the simplified DDL dataset definition:
     
     
     hdfhelp@@ncsa.uiuc.edu
    -
    Last Modified: August 27, 1999

    +
    Last Modified: April 5, 2000

    +
    diff --git a/doc/html/Tutor/crtfile.html b/doc/html/Tutor/crtfile.html index 8786aab..b0bf50d 100644 --- a/doc/html/Tutor/crtfile.html +++ b/doc/html/Tutor/crtfile.html @@ -13,7 +13,7 @@ width=78 height=27 alt="NCSA">

    [ HDF5 Tutorial Top ]

    -Creating an HDF5 file +Creating an HDF5 File


    @@ -34,134 +34,228 @@ width=78 height=27 alt="NCSA">

    What is an HDF5 file?

    -An HDF5 file is a binary file which contains scientific data and supporting -metadata. The two primary objects stored in an HDF5 file are groups and -datasets. Groups and datasets will be discussed in the other sessions. +An HDF5 file is a binary file containing scientific data and supporting +metadata. The primary types of objects stored in an HDF5 file, groups and +datasets, will be discussed in other sections of this tutorial.

    -To create a file, the program application must specify a file name, file +To create a file, an application must specify a filename, file access mode, file creation property list, and file access property list.

    The steps to create and close an HDF5 file are as follows:

      -
    1. Specify the file creation and access property lists if necessary. -
    2. Create a file. -
    3. Close the file and close the property lists if necessary. +
1. Specify the file creation and access property lists, if necessary. +
2. Create the file. +
3. Close the file and close the property lists, if necessary.
    -To create an HDF5 file, the calling program must contain the following calls: - -
    -   file_id = H5Fcreate(filename, access_mode, create_id, access_id);
    +To create an HDF5 file, the calling program must contain calls to 
    +create and close the file.  For example:
    +

    +C:

    +   file_id = H5Fcreate (filename, access_mode, create_id, access_id);
    +   status = H5Fclose (file_id); 
    +
    +FORTRAN:
    +   CALL h5fcreate_f (filename, access_mode, file_id, hdferr, &
    +            creation_prp=create_id, access_prp=access_id)
    +        or
    +   CALL h5fcreate_f (filename, access_mode, file_id, hdferr)
     
    -   H5Fclose(file_id); 
    +   CALL h5fclose_f (file_id, hdferr)
     
    +In FORTRAN, the file creation property list, creation_prp, +and file access property list, access_prp, +are optional parameters; +they can be omitted if the default values are to be used.

    Programming Example

    Description

    The following example demonstrates how to create and close an HDF5 file. -It creates a file called 'file.h5', and then closes the file.
    -[
    Download h5_crtfile.c ] -
    -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    -
    -
    -#include <hdf5.h>
    -#define FILE "file.h5"
    -
    -main() {
    -
    -   hid_t       file_id;   /* file identifier */
    -   herr_t      status;
    -
    -   /* Create a new file using default properties. */
    -   file_id = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    +It creates a file called file.h5 in the C version,
    +filef.h5 in FORTRAN, and then closes the file.

    - /* Terminate access to the file. */ - status = H5Fclose(file_id); -} -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +

    +

    +NOTE: To download a tar file of all of the examples, including +a Makefile, please go to the References page. -
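A condensed C sketch equivalent to the removed inline listing, using the call forms shown above; the filename file.h5 follows the example description for the C version.
C:
    hid_t  file_id;
    herr_t status;

    /* Create a new file with the default creation and access property  */
    /* lists, truncating any existing file of the same name, then       */
    /* close the file.                                                   */
    file_id = H5Fcreate ("file.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    status  = H5Fclose (file_id);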

    Remarks

      -
    • The include file 'hdf5.h' contains definitions and declarations, and it must - be included in any file that uses the HDF5 library. +
    • In C: + The include file hdf5.h contains definitions and declarations + and must be included in any program that uses the HDF5 library. +
      In FORTRAN: + The module HDF5 contains definitions and declarations + and must be used in any program that uses the HDF5 library.

      -

    • H5Fcreate creates an HDF5 file and returns the file identifier. +
    • H5Fcreate/h5fcreate_f creates + an HDF5 file and returns the file identifier.
      - hid_t H5Fcreate (const char *name, unsigned flags, hid_t create_id, 
      -                  hid_t access_id) 
      +C:       
      +  hid_t H5Fcreate (const char *name, unsigned access_mode, hid_t creation_prp, 
      +                   hid_t access_prp) 
      +FORTRAN: 
      +  h5fcreate_f (name, access_mode, file_id, hdferr, creation_prp, access_prp)
      +
      +           name          CHARACTER(LEN=*)
+           access_mode   INTEGER 
      +                         (Valid values: H5F_ACC_RDWR_F, H5F_ACC_RDONLY_F, 
      +                         H5F_ACC_TRUNC_F, H5F_ACC_EXCL_F, H5F_ACC_DEBUG_F)
      +           file_id       INTEGER(HID_T)
      +           hdferr        INTEGER 
      +                         (Valid values: 0 on success and -1 on failure)
      +           creation_prp  INTEGER(HID_T), OPTIONAL
      +                         (Default value: H5P_DEFAULT_F)
      +           access_prp    INTEGER(HID_T), OPTIONAL
      +                         (Default value: H5P_DEFAULT_F) 
      +         
       
        -
      • The first parameter specifies the name of the file to be created. +
      • The name parameter specifies the name of the file to be created.

        -

      • The second parameter specifies the file access mode. H5F_ACC_TRUNC will - truncate a file if it already exists. +
      • The access_mode parameter specifies the file access mode. + H5F_ACC_TRUNC (H5F_ACC_TRUNC_F in FORTRAN) + will truncate a file if it already exists.

        -

      • The third parameter specifies the file creation property list. - H5P_DEFAULT indicates that the default file creation property list is - used. - +
• The creation_prp parameter + specifies the file creation property list. + For C, using H5P_DEFAULT indicates that the + default file creation property list is to be used. + This parameter is optional in FORTRAN; if it is omitted, the default file + creation property list, H5P_DEFAULT_F, is used.

        -

      • The last parameter of H5Fcreate specifies the file access property list. - H5P_DEFAULT indicates that the default file access property list is used. +
• The access_prp parameter + specifies the file access property list. + For C, using H5P_DEFAULT indicates that the + default file access property list is to be used. + This parameter is optional in FORTRAN; if it is omitted, the default file + access property list, H5P_DEFAULT_F, is used. +

        +

      • In C, this function returns the file identifier if successful and + a negative value otherwise. + In FORTRAN, the file identifier is returned in the + file_id parameter. If the call is successful, 0 (zero) is + passed back in the hdferr parameter. Otherwise, hdferr + will have a value of -1.

      -

    • When a file is no longer accessed by a program, H5Fclose must be called to - release the resource used by the file. This call is mandatory. +
    • When a file is no longer accessed by a program, + H5Fclose/h5fclose_f + must be called to release the resources used by the file. This call + is mandatory.
      +C:
           herr_t H5Fclose (hid_t file_id) 
      +
      +FORTRAN:
      +    h5fclose_f(file_id, hdferr)
       

    • The root group is automatically created when a file is created. - Every file has a root group and the path name of the root group is '/'. + Every file has a root group and the path name of the root group is + always /.

    File Contents

    -HDF has developed tools to examine the contents of HDF5 files. The tool used -in this tutorial is the HDF5 dumper, h5dump. h5dump is a tool that displays -the file contents in human readable form to an ASCII file in DDL. DDL (Data -Description Language) is a language that describes HDF5 objects in Backus-Naur -Form. To view the file contents, type: +The HDF team has developed tools for examining the contents of HDF5 files. +The tool used in this tutorial is the HDF5 dumper, h5dump, +which displays the file contents in human-readable form. +The output of h5dump is an ASCII display formatted according +to the HDF5 DDL grammar. +This grammar is defined, using Backus-Naur Form, in the +
    DDL in BNF for HDF5. +

    +To view the file contents, type:

        h5dump <filename> 
     
    -Figure 4.1 describes the file contents of 'file.h5' using a directed graph. -Each HDF5 object is represented by a rectangle and the arrows indicate -the structure of the contents. In Fig. 4.2, 'file.h5' contains -a group object named '/' (the root group). + +Figure 4.1 describes the file contents of file.h5 (filef.h5) +using a directed graph. +The directed graphs in this tutorial use an oval to represent an HDF5 group +and a rectangle to represent an HDF5 dataset (none in this example). +Arrows indicate the inclusion direction of the contents (none in this example).

    -Fig. 4.1   Contents of 'file.h5' +Fig. 4.1   Contents of file.h5 (filef.h5)

     
     
    -Figure 4.2 is the text-description of 'file.h5' generated by h5dump. The HDF5 -file called 'file.h5' contains a group called '/'. + +Figure 4.2 is the text description of file.h5, as generated by +h5dump. The HDF5 file called file.h5 contains +a group called /, or the root group. +(The file called filef.h5, +created by the FORTRAN version of the example, has the same output except +that the filename shown is filef.h5.)

    - Fig. 4.2   'file.h5' in DDL + Fig. 4.2   file.h5 in DDL

     
              HDF5 "file.h5" {
    @@ -171,15 +265,18 @@ file called 'file.h5' contains a group called '/'.
     
     
    +

    File Definition in DDL

    + Figure 4.3 is the simplified DDL file definition for creating an HDF5 file. For simplicity, a simplified DDL is used in this tutorial. A complete and -more rigorous DDL can be found in the HDF5 User's Guide. See the -
    References section of this tutorial. +more rigorous DDL can be found in the +DDL in BNF for HDF5, a section of the +HDF5 User's Guide.

    Fig. 4.3   HDF5 File Definition

    - The explanation of the symbols used in the DDL: + The following symbol definitions are used in the DDL:

     
             ::=               defined as
    @@ -187,7 +284,7 @@ more rigorous DDL can be found in the HDF5 User's Guide.  See the
             <a> | <b>         one of <a> or <b>
             <a>*              zero or more occurrences of <a>
     
    - The simplified DDL file definition: + The simplified DDL for file definition is as follows:
             <file> ::= HDF5 "<file_name>" { <root_group> }
     
    @@ -211,14 +308,12 @@ more rigorous DDL can be found in the HDF5 User's Guide.  See the
     
     
     hdfhelp@ncsa.uiuc.edu
    -
    Last Modified: August 27, 1999

    +
    Last Modified: April 5, 2000

    + -
    + - - - diff --git a/doc/html/Tutor/crtgrp.html b/doc/html/Tutor/crtgrp.html index 566fd2c..1605539 100644 --- a/doc/html/Tutor/crtgrp.html +++ b/doc/html/Tutor/crtgrp.html @@ -42,13 +42,25 @@ program must:
  • Create the group.
  • Close the group. -To create a group, the calling program must contain the following calls: +To create a group, the calling program must call +H5Gcreate/h5gcreate_f. +To close the group, H5Gclose/h5gclose_f +must be called. For example: +

    +C:

       group_id = H5Gcreate (loc_id, name, size_hint);
    -  H5Gclose (group_id);
    +  status = H5Gclose (group_id);
     
    +FORTRAN: +
    +  CALL h5gcreate_f (loc_id, name, group_id, error, size_hint=size)
    +       or
    +  CALL h5gcreate_f (loc_id, name, group_id, error)
     
     
    +  CALL h5gclose_f (group_id, error)
    +

    @@ -56,77 +68,102 @@ To create a group, the calling program must contain the following calls:

    Description

    The following example shows how to create and close a group. It creates a file -called 'group.h5', creates a group called MyGroup in the root group, +called group.h5 (groupf.h5 for FORTRAN), +creates a group called MyGroup in the root group, and then closes the group and file.
    -[
    Download h5_crtgrp.c ] -
    -
    -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    -
    -#include <hdf5.h>
    -#define FILE "group.h5"
    -
    -main() {
    -
    -   hid_t       file_id, group_id;  /* identifiers */
    -   herr_t      status;
    -
    -   /* Create a new file using default properties. */
    -   file_id = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    -
    -   /* Create a group named "/MyGroup" in the file. */
    -   group_id = H5Gcreate(file_id, "/MyGroup", 0);
    -
    -   /* Close the group. */
    -   status = H5Gclose(group_id);
    +
    +NOTE: To download a tar file of the examples, including a Makefile,
    +please go to the References page.
     
    -   /* Terminate access to the file. */
    -   status = H5Fclose(file_id);
    -}
    -++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
     

    Remarks

      -
    • H5Gcreate creates a new empty group and returns a group identifier. +
    • H5Gcreate/h5gcreate_f creates + a new empty group, named MyGroup and located in the + root group, and returns a group identifier. +

      +C:

         hid_t H5Gcreate (hid_t loc_id, const char *name, size_t size_hint) 
       
      +FORTRAN: +
      +  h5gcreate_f (loc_id, name, group_id, hdferr, size_hint)
      +
      +           loc_id     INTEGER(HID_T)
      +           name       CHARACTER(LEN=*)
      +           group_id   INTEGER(HID_T)
      +           hdferr     INTEGER
      +                      (Possible values: 0 on success and -1 on failure)
      +           size_hint  INTEGER(SIZE_T), OPTIONAL
      +                      (Default value: OBJECT_NAMELEN_DEFAULT_F)
      +         
      +
        -
      • The first parameter specifies the location to create the group. - -
      • The second parameter specifies the name of the group to be created. - -
      • The third parameter specifies how much file space to reserve to store the +
      • The loc_id parameter specifies the location at which + to create the group. +

        +

      • The name parameter specifies the name of the group to be created. +

        +

      • The size_hint parameter specifies how much file space to + reserve to store the names that will appear in the group. If a non-positive value is supplied, then a default size is used. Passing a value of zero is usually adequate since the library is able to dynamically resize the name heap. +

        +

      • In FORTRAN, the return value for the routine is passed in + hdferr: 0 if successful, -1 otherwise. The group identifier + is passed back in group_id. In C, the function returns a valid + group identifier if successful and a negative value otherwise. +

      -

    • H5Gcreate creates a group named MyGroup in the root group of the specified - file. +
    • H5Gclose/h5gclose_f closes the group. + This call is mandatory.

      -

    • H5Gclose closes the group. This call is mandatory. +C:
         herr_t H5Gclose (hid_t group_id) 
       
      +FORTRAN: +
      +  h5gclose_f (group_id, hdferr)
      +
      +           group_id  INTEGER(HID_T)
      +           hdferr    INTEGER
      +                     (Possible values: 0 on success and -1 on failure)
      +         
      +

    File Contents

    -The contents of 'group.h5' and the definition of the group are given in the -following: +The contents of group.h5 and the +definition of the group are shown below. (The FORTRAN program +creates the HDF5 file groupf.h5 and the resulting DDL shows +groupf.h5 in the first line.)

Fig. 8.1   The Contents of group.h5.
Fig. 8.2   group.h5 in DDL
    +  
           
     HDF5 "group.h5" {
     GROUP "/" {
    @@ -154,7 +191,7 @@ GROUP "/" {
     
     
     hdfhelp@ncsa.uiuc.edu
    -
    Last Modified: August 27, 1999

    +
    Last Modified: April 5, 2000


    diff --git a/doc/html/Tutor/crtgrpar.html b/doc/html/Tutor/crtgrpar.html index d5fbc66..f396605 100644 --- a/doc/html/Tutor/crtgrpar.html +++ b/doc/html/Tutor/crtgrpar.html @@ -1,5 +1,5 @@ -HDF5 Tutorial - Creating Groups using Absolute/Relative Names +<TITLE>HDF5 Tutorial - Creating Groups using Absolute and Relative Names @@ -13,8 +13,8 @@ width=78 height=27 alt="NCSA">

    [ HDF5 Tutorial Top ]

    -Creating Groups using -Absolute/Relative Names +Creating Groups Using +Absolute and Relative Names


    @@ -39,95 +39,103 @@ object is to be created. This location is determined by the identifier of an HDF object and the name of the object to be created. The name of the created object can be either an absolute name or a name relative to the specified identifier. -In Example 5, we used the file identifier and the absolute name "/MyGroup" to create a -group. The file identifier and the name "/" specifies the location where the group -"MyGroup" was created. +In the previous example, we used the file identifier and the absolute name +/MyGroup to create a group.

    -In this section, we discuss HDF5 names and show how to use absolute/relative -names by giving an example of creating groups in a file. +In this section, we discuss HDF5 names and show how to use absolute and +relative names.

    Names

HDF5 object names are a slash-separated list of components. There are few restrictions on names: component names may be any length except zero and may -contain any character except slash ("/") and the null terminator. A full name +contain any character except slash (/) and the null terminator. +A full name may be composed of any number of component names separated by slashes, with any -of the component names being the special name ".". A name which begins with a -slash is an absolute name which is accessed beginning with the root group of the -file while all other relative names are accessed beginning with the specified -group. Multiple consecutive slashes in a full name are treated as single slashes -and trailing slashes are not significant. A special case is the name "/" (or +of the component names being the special name . (a dot or period). +A name which begins with a slash is an absolute name which is accessed +beginning with the root group of the file; +all other names are relative names and the named object is +accessed beginning with the specified group. +Multiple consecutive slashes in a full name are treated as single slashes +and trailing slashes are not significant. A special case is the name / (or equivalent) which refers to the root group.

    -Functions which operate on names generally take a location identifier which -is either a file ID or a group ID and perform the lookup with respect to that -location. Some possibilities are: +Functions which operate on names generally take a location identifier, which +can be either a file identifier or a group identifier, and perform the lookup +with respect to that location. +Several possibilities are described in the following table: - +
    +
    - + - + - + - + - + - + - + - + - + - + - +
Location Type      Object Name   Description
File identifier    /foo/bar      The object bar in group foo in the root group.
Group identifier   /foo/bar      The object bar in group foo in the root group of
                                 the file containing the specified group. In other
                                 words, the group identifier's only purpose is to
                                 specify a file.
File identifier    /             The root group of the specified file.
Group identifier   /             The root group of the file containing the
                                 specified group.
Group identifier   foo/bar       The object bar in group foo in the specified group.
File identifier    .             The root group of the file.
Group identifier   .             The specified group.
Other identifier   .             The specified object.
    +
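As a hypothetical illustration of the lookups listed in the table (this fragment is not one of the tutorial examples and assumes a file such as groups.h5, created later in this section, already exists), the same group can be reached through a file identifier with an absolute name or through a group identifier with a relative name:

   #include <hdf5.h>

   int main(void)
   {
      hid_t file_id, grp_id, subgrp_id;

      file_id = H5Fopen("groups.h5", H5F_ACC_RDONLY, H5P_DEFAULT);

      /* Absolute name, resolved from the file identifier. */
      subgrp_id = H5Gopen(file_id, "/MyGroup/Group_A");
      H5Gclose(subgrp_id);

      /* Relative name, resolved from a group identifier. */
      grp_id = H5Gopen(file_id, "/MyGroup");
      subgrp_id = H5Gopen(grp_id, "Group_A");

      H5Gclose(subgrp_id);
      H5Gclose(grp_id);
      H5Fclose(file_id);

      return 0;
   }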

    @@ -136,74 +144,51 @@ location. Some possibilities are:

    Description

    The following example code shows how to create groups using absolute and relative names. It creates three groups: the first two groups are -created using the file identifier and the group absolute names, and the -third group is created using a group identifier and the name relative +created using the file identifier and the group absolute names while the +third group is created using a group identifier and a name relative to the specified group.
    -[ Download h5_crtgrpar.c ] - -
    -
    -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    -
    -#include <hdf5.h>
    -#define FILE "groups.h5"
    -
    -main() {
    -
    -   hid_t       file_id, group1_id, group2_id, group3_id;  /* identifiers */
    -   herr_t      status;
    -
    -   /* Create a new file using default properties. */
    -   file_id = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    -
    -   /* Create group "MyGroup" in the root group using absolute name. */
    -   group1_id = H5Gcreate(file_id, "/MyGroup", 0);
    -
    -   /* Create group "Group_A" in group "MyGroup" using absolute name. */
    -   group2_id = H5Gcreate(file_id, "/MyGroup/Group_A", 0);
    -
    -   /* Create group "Group_B" in group "MyGroup" using relative name. */
    -   group3_id = H5Gcreate(group1_id, "Group_B", 0);
    +
     
    -   /* Close groups. */
    -   status = H5Gclose(group1_id);
    -   status = H5Gclose(group2_id);
    -   status = H5Gclose(group3_id);
    -
    -   /* Close the file. */
    -   status = H5Fclose(file_id);
    -}
    -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    -
    +NOTE: To download a tar file of the examples, including a Makefile, +please go to the References page.

    Remarks

      -
    • H5Gcreate creates a group at the location specified by a location ID and a - name. The location ID can be a file ID or a group ID and the name can be - relative or absolute. - -
    • The first H5Gcreate creates the group 'MyGroup' in the root group of the - specified file. - -
    • The second H5Gcreate creates the group 'Group_A' in the group 'MyGroup' - in the root group of the specified file. Note that the parent group (MyGroup) - already exists. - -
    • The third H5Gcreate creates the group 'Group_B' in the specified group. +
    • H5Gcreate/h5gcreate_f creates a group at the + location specified by a location identifier and a name. + The location identifier can be a file identifier or a group identifier + and the name can be relative or absolute. +

      +

    • The first H5Gcreate/h5gcreate_f creates the group + MyGroup in the root group of the specified file. +

      +

    • The second H5Gcreate/h5gcreate_f creates the group + Group_A in the group MyGroup in the root group + of the specified file. Note that the parent group (MyGroup) + already exists. +

      +

    • The third H5Gcreate/h5gcreate_f creates the group + Group_B in the specified group.

    File Contents

    The file contents are shown below:

    -Fig. 9.1   The Contents of 'groups.h5' +Fig. 9.1   The Contents of groups.h5 + (groupsf.h5 for FORTRAN)

    - Fig. 9.2   'groups.h5' in DDL + Fig. 9.2   groups.h5 in DDL + (for FORTRAN, the name in the first line is groupsf.h5)
     
           HDF5 "groups.h5" {
    @@ -233,7 +218,7 @@ The file contents are shown below:
     
     
     hdfhelp@ncsa.uiuc.edu
    -
    Last Modified: August 27, 1999

    +
    Last Modified: April 5, 2000


    diff --git a/doc/html/Tutor/crtgrpd.html b/doc/html/Tutor/crtgrpd.html index e92a101..a7baa1e 100644 --- a/doc/html/Tutor/crtgrpd.html +++ b/doc/html/Tutor/crtgrpd.html @@ -32,13 +32,16 @@ width=78 height=27 alt="NCSA">


    Creating datasets in groups

    -We have shown how to create groups, datasets and attributes. In this section, -we show how to create datasets in groups. Recall that H5Dcreate creates a -dataset at the location specified by a location identifier and a name. Similar to -H5Gcreate, the location identifier can be a file identifier or a group identifier and the name can be -relative or absolute. The location identifier and the name together determine the -location where the dataset is to be created. If the location identifier and name -refers to a group, then the dataset is created in that group. +We have shown how to create groups, datasets, and attributes. +In this section, we show how to create datasets in groups. +Recall that H5Dcreate/h5dcreate_f +creates a dataset at the location specified by a location identifier and +a name. Similar to H5Gcreate/h5gcreate_f, +the location identifier can be a +file identifier or a group identifier and the name can be +relative or absolute. The location identifier and the name together determine +the location where the dataset is to be created. If the location identifier +and name refer to a group, then the dataset is created in that group.
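The sketch below is not the downloadable example itself; for brevity it reuses a single 3x3 dataspace for both datasets and omits the data writes and error checking. It shows the two variants: creating a dataset through the file identifier with an absolute name, and through a group identifier with a relative name.

   #include <hdf5.h>

   int main(void)
   {
      hid_t   file_id, group_id, space_id, dset_id;
      hsize_t dims[2] = {3, 3};

      /* Open the file that already contains /MyGroup and /MyGroup/Group_A. */
      file_id = H5Fopen("groups.h5", H5F_ACC_RDWR, H5P_DEFAULT);
      space_id = H5Screate_simple(2, dims, NULL);

      /* Absolute name: the dataset is created in /MyGroup. */
      dset_id = H5Dcreate(file_id, "/MyGroup/dset1", H5T_STD_I32BE,
                          space_id, H5P_DEFAULT);
      H5Dclose(dset_id);

      /* Relative name: the dataset is created in the opened group. */
      group_id = H5Gopen(file_id, "/MyGroup/Group_A");
      dset_id = H5Dcreate(group_id, "dset2", H5T_STD_I32BE,
                          space_id, H5P_DEFAULT);

      H5Dclose(dset_id);
      H5Gclose(group_id);
      H5Sclose(space_id);
      H5Fclose(file_id);

      return 0;
   }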

    Programming Example

    @@ -46,127 +49,98 @@ refers to a group, then the dataset is created in that group.

    Description

    This example shows how to create a dataset in a particular group. It opens the file created in the previous example and creates two datasets.
    -[
    Download h5_crtgrpd.c ] -
    -
    -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    -
    -#include <hdf5.h>
    -#define FILE "groups.h5"
    -
    -main() {
    -
    -   hid_t       file_id, group_id, dataset_id, dataspace_id;  /* identifiers */
    -   hsize_t     dims[2];
    -   herr_t      status;
    -   int         i, j, dset1_data[3][3], dset2_data[2][10];
    -
    -   /* Initialize the first dataset. */
    -   for (i = 0; i < 3; i++)
    -      for (j = 0; j < 3; j++)
    -         dset1_data[i][j] = j + 1;
    -
    -   /* Initialize the second dataset. */
    -   for (i = 0; i < 2; i++)
    -      for (j = 0; j < 10; j++)
    -         dset2_data[i][j] = j + 1;
    -
    -   /* Open an existing file. */
    -   file_id = H5Fopen(FILE, H5F_ACC_RDWR, H5P_DEFAULT);
    -
    -   /* Create the data space for the first dataset. */
    -   dims[0] = 3;
    -   dims[1] = 3;
    -   dataspace_id = H5Screate_simple(2, dims, NULL);
    -
    -   /* Create a dataset in group "MyGroup". */
    -   dataset_id = H5Dcreate(file_id, "/MyGroup/dset1", H5T_STD_I32BE, dataspace_id
    -,
    -                       H5P_DEFAULT);
    -
    -   /* Write the first dataset. */
    -   status = H5Dwrite(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
    -                     dset1_data);
    -
    -   /* Close the data space for the first dataset. */
    -   status = H5Sclose(dataspace_id);
    -
    -   /* Close the first dataset. */
    -   status = H5Dclose(dataset_id);
    -
    -   /* Open an existing group of the specified file. */
    -   group_id = H5Gopen(file_id, "/MyGroup/Group_A");
    -
    -   /* Create the data space for the second dataset. */
    -   dims[0] = 2;
    -   dims[1] = 10;
    -   dataspace_id = H5Screate_simple(2, dims, NULL);
    -
    -   /* Create the second dataset in group "Group_A". */
    -   dataset_id = H5Dcreate(group_id, "dset2", H5T_STD_I32BE, dataspace_id, H5P_DEFAULT);
    -
    -   /* Write the second dataset. */
    -   status = H5Dwrite(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
    -                     dset2_data);
    -
    -   /* Close the data space for the second dataset. */
    -   status = H5Sclose(dataspace_id);
    -
    -   /* Close the second dataset */
    -   status = H5Dclose(dataset_id);
    -
    -   /* Close the group. */
    -   status = H5Gclose(group_id);
    -
    -   /* Close the file. */
    -   status = H5Fclose(file_id);
    -}
    -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    -
    + +NOTE: To download a tar file of the examples, including a Makefile, +please go to the References page.

    File Contents

    -Fig. 10.1   The Contents of 'groups.h5' +Fig. 10.1   The Contents of groups.h5 + (groupsf.h5 for FORTRAN)

     
    - Fig. 10.2   'groups.h5' in DDL + Fig. 10.2a   groups.h5 in DDL
     
    -   HDF5 "groups.h5" {
    -      GROUP "/" {
    -         GROUP "MyGroup" {
    -            GROUP "Group_A" {
    -               DATASET "dset2" {
    -                  DATATYPE { H5T_STD_I32BE }
    -                  DATASPACE { SIMPLE ( 2, 10 ) / ( 2, 10 ) }
    -                  DATA {
    -                     1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
    -                     1, 2, 3, 4, 5, 6, 7, 8, 9, 10
    -                  }
    -               }
    -            }
    -            GROUP "Group_B" {
    -            }
    -            DATASET "dset1" {
    -               DATATYPE { H5T_STD_I32BE }
    -               DATASPACE { SIMPLE ( 3, 3 ) / ( 3, 3 ) }
    -               DATA {
    -                  1, 2, 3,
    -                  1, 2, 3,
    -                  1, 2, 3
    -               }
    +HDF5 "groups.h5" {
    +GROUP "/" {
    +   GROUP "MyGroup" {
    +      GROUP "Group_A" {
    +         DATASET "dset2" {
    +            DATATYPE { H5T_STD_I32BE }
    +            DATASPACE { SIMPLE ( 2, 10 ) / ( 2, 10 ) }
    +            DATA {
    +               1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
    +               1, 2, 3, 4, 5, 6, 7, 8, 9, 10
                 }
              }
           }
    +      GROUP "Group_B" {
           }
    +      DATASET "dset1" {
    +         DATATYPE { H5T_STD_I32BE }
    +         DATASPACE { SIMPLE ( 3, 3 ) / ( 3, 3 ) }
    +         DATA {
    +            1, 2, 3,
    +            1, 2, 3,
    +            1, 2, 3
    +         }
    +      }
    +   }
    +}
    +}
     
    + Fig. 10.2b   groupsf.h5 in DDL +
     
    -
    +HDF5 "groupsf.h5" {
    +GROUP "/" {
    +   GROUP "MyGroup" {
    +      GROUP "Group_A" {
    +         DATASET "dset2" {
    +            DATATYPE { H5T_STD_I32BE }
    +            DATASPACE { SIMPLE ( 10, 2 ) / ( 10, 2 ) }
    +            DATA {
    +               1, 1,
    +               2, 2,
    +               3, 3,
    +               4, 4,
    +               5, 5,
    +               6, 6,
    +               7, 7,
    +               8, 8,
    +               9, 9,
    +               10, 10
    +            }
    +         }
    +      }
    +      GROUP "Group_B" {
    +      }
    +      DATASET "dset1" {
    +         DATATYPE { H5T_STD_I32BE }
    +         DATASPACE { SIMPLE ( 3, 3 ) / ( 3, 3 ) }
    +         DATA {
    +            1, 1, 1,
    +            2, 2, 2,
    +            3, 3, 3
    +         }
    +      }
    +   }
    +}
    +}
    +


    @@ -178,7 +152,7 @@ main() {
    hdfhelp@ncsa.uiuc.edu -
    Last Modified: August 27, 1999

    +
    Last Modified: April 5, 2000


    diff --git a/doc/html/Tutor/examples/attrexample.f90 b/doc/html/Tutor/examples/attrexample.f90 new file mode 100644 index 0000000..43a6854 --- /dev/null +++ b/doc/html/Tutor/examples/attrexample.f90 @@ -0,0 +1,87 @@ +! This example shows how to create and write a dataset attribute. +! It opens the existing file 'dset.h5', obtains the identifier of +! the dataset "/dset", defines attribute's dataspace, +! creates dataset attribute, writes the attribute, and then closes +! the attribute's dataspace, attribute, dataset, and file. + + PROGRAM ATTREXAMPLE + + + USE HDF5 ! This module contains all necessary modules + + IMPLICIT NONE + + CHARACTER(LEN=8), PARAMETER :: filename = "dsetf.h5" ! File name + CHARACTER(LEN=4), PARAMETER :: dsetname = "dset" ! Dataset name + CHARACTER(LEN=4), PARAMETER :: aname = "attr" ! Attribute name + + INTEGER(HID_T) :: file_id ! File identifier + INTEGER(HID_T) :: dset_id ! Dataset identifier + INTEGER(HID_T) :: attr_id ! Attribute identifier + INTEGER(HID_T) :: aspace_id ! Attribute Dataspace identifier + + INTEGER(HSIZE_T), DIMENSION(1) :: adims = (/2/) ! Attribute dimension + INTEGER, DIMENSION(2) :: attr_data = (/100,200/)! Attribute data + INTEGER :: arank = 1 ! Attribure rank + + INTEGER :: error ! Error flag + + + ! + ! Initialize FORTRAN predefined datatypes. + ! + CALL h5init_types_f(error) + + ! + ! Open an existing file. + ! + CALL h5fopen_f (filename, H5F_ACC_RDWR_F, file_id, error) + + ! + ! Open an existing dataset. + ! + CALL h5dopen_f(file_id, dsetname, dset_id, error) + + ! + ! Create the data space for the attribute. + ! + CALL h5screate_simple_f(arank, adims, aspace_id, error) + + ! + ! Create dataset attribute. + ! + CALL h5acreate_f(dset_id, aname, H5T_NATIVE_INTEGER,aspace_id, & + attr_id, error) + + ! + ! Write the attribute data. + ! + CALL h5awrite_f(attr_id, H5T_NATIVE_INTEGER, attr_data, error) + + ! + ! Close the attribute. + ! + CALL h5aclose_f(attr_id, error) + + ! + ! Terminate access to the data space. + ! + CALL h5sclose_f(aspace_id, error) + + ! + ! End access to the dataset and release resources used by it. + ! + CALL h5dclose_f(dset_id, error) + + ! + ! Close the file. + ! + CALL h5fclose_f(file_id, error) + + ! + ! Close FORTRAN predefined datatypes. + ! + CALL h5close_types_f(error) + + END PROGRAM ATTREXAMPLE + diff --git a/doc/html/Tutor/examples/chunk.f90 b/doc/html/Tutor/examples/chunk.f90 new file mode 100644 index 0000000..70d973a --- /dev/null +++ b/doc/html/Tutor/examples/chunk.f90 @@ -0,0 +1,310 @@ +! +!This example shows how to work with extendible datasets. +!It creates a 3 x 3 extendible dataset, write to that dataset, +!extend the dataset to 10x3, and write to the dataset again +! + + + + + PROGRAM CHUNKEXAMPLE + + USE HDF5 ! This module contains all necessary modules + + IMPLICIT NONE + + ! + !the dataset is stored in file "extf.h5" + ! + CHARACTER(LEN=7), PARAMETER :: filename = "extf.h5" + + ! + !dataset name is "ExtendibleArray" + ! + CHARACTER(LEN=15), PARAMETER :: dsetname = "ExtendibleArray" + + ! + !dataset rank is 2 + ! + INTEGER :: RANK = 2 + + INTEGER(HID_T) :: file_id ! File identifier + INTEGER(HID_T) :: dset_id ! Dataset identifier + INTEGER(HID_T) :: dataspace ! Dataspace identifier + INTEGER(HID_T) :: filespace ! Dataspace identifier + INTEGER(HID_T) :: memspace ! memspace identifier + INTEGER(HID_T) :: cparms !dataset creatation property identifier + + ! + !dataset dimensions at creation time + ! + INTEGER(HSIZE_T), DIMENSION(2) :: dims = (/3,3/) + + ! + !data1 dimensions + ! 
+ INTEGER(HSIZE_T), DIMENSION(2) :: dims1 = (/3,3/) + + ! + !data2 dimensions + ! + INTEGER(HSIZE_T), DIMENSION(2) :: dims2 = (/7,1/) + + ! + !Maximum dimensions + ! + INTEGER(HSIZE_T), DIMENSION(2) :: maxdims + + ! + !data1 dimensions + ! + INTEGER, DIMENSION(3,3) :: data1 + + ! + !data2 dimensions + ! + INTEGER, DIMENSION(7,1) :: data2 + + ! + !Size of the hyperslab in the file + ! + INTEGER(HSIZE_T), DIMENSION(2) :: size + + ! + !hyperslab offset in the file + ! + INTEGER(HSIZE_T), DIMENSION(2) :: offset + + ! + !general purpose integer + ! + INTEGER :: i, j, k + + ! + !flag to check operation success + ! + INTEGER :: error, error_n + + ! + !Variables used in reading data back + ! + INTEGER(HSIZE_T), DIMENSION(2) :: chunk_dims = (/5,2/) + INTEGER(HSIZE_T), DIMENSION(2) :: chunk_dimsr + INTEGER(HSIZE_T), DIMENSION(2) :: dimsr, maxdimsr + INTEGER, DIMENSION(10,3) :: data_out + INTEGER :: rankr, rank_chunk + + ! + !data initialization + ! + do i = 1, 3 + do j = 1, 3 + data1(i,j) = 1 + end do + end do + + do j = 1, 7 + data2(j,1) = 2 + end do + + + ! + !Initialize FORTRAN predifined datatypes + ! + CALL h5init_types_f(error) + + ! + !Create a new file using default properties. + ! + CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error) + + + ! + !Create the data space with unlimited dimensions. + ! + maxdims = (/H5S_UNLIMITED_f, H5S_UNLIMITED_f/) + + CALL h5screate_simple_f(RANK, dims, dataspace, error, maxdims) + + ! + !Modify dataset creation properties, i.e. enable chunking + ! + CALL h5pcreate_f(H5P_DATASET_CREATE_F, cparms, error) + + CALL h5pset_chunk_f(cparms, RANK, chunk_dims, error) + + ! + !Create a new dataset within the file using cparms creation properties. + ! + !CALL h5dcreate_f(file_id, dsetname, H5T_NATIVE_INT_F, dataspace, & + CALL h5dcreate_f(file_id, dsetname, H5T_NATIVE_INTEGER, dataspace, & + dset_id, error, cparms) + + ! + !Extend the dataset. This call assures that dataset is 3 x 3. + ! + size(1) = 3 + size(2) = 3 + CALL h5dextend_f(dset_id, size, error) + + + ! + !Select a hyperslab. + ! + CALL h5dget_space_f(dset_id, filespace, error) + offset(1) = 0; + offset(2) = 0; + CALL h5sselect_hyperslab_f(filespace, H5S_SELECT_SET_F, & + offset, dims1, error) + + ! + !Write the data to the hyperslab. + ! + !CALL H5Dwrite_f(dset_id, H5T_NATIVE_INT_F, data1, error, & + CALL H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER, data1, error, & + filespace, dataspace) + + ! + !Extend the dataset. Dataset becomes 10 x 3. + ! + dims(1) = dims1(1) + dims2(1); + size(1) = dims(1); + size(2) = dims(2); + CALL h5dextend_f(dset_id, size, error) + + ! + !Select a hyperslab. + ! + CALL h5dget_space_f(dset_id, filespace, error) + offset(1) = 3; + offset(2) = 0; + CALL h5sselect_hyperslab_f(filespace, H5S_SELECT_SET_F, & + offset, dims2, error) + + ! + !create memory dataspace. + ! + CALL h5screate_simple_f(RANK, dims2, memspace, error) + + ! + !Write the data to the hyperslab. + ! + !CALL H5Dwrite_f(dset_id, H5T_NATIVE_INT_F, data2, error, & + CALL H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER, data2, error, & + mem_space_id=memspace, file_space_id=filespace) + + ! + !Close the dataspace for the dataset. + ! + CALL h5sclose_f(dataspace, error) + CALL h5sclose_f(filespace, error) + + ! + !Close the memoryspace. + ! + CALL h5sclose_f(memspace, error) + + ! + !Close the dataset. + ! + CALL h5dclose_f(dset_id, error) + + ! + !Close the property list. + ! + CALL h5pclose_f(cparms, error) + + ! + !Close the file. + ! + CALL h5fclose_f(file_id, error) + + ! + !read the data back + ! + !Open the file. + ! 
+ CALL h5fopen_f (filename, H5F_ACC_RDONLY_F, file_id, error) + + ! + !Open the dataset. + ! + CALL h5dopen_f(file_id, dsetname, dset_id, error) + + ! + !Get dataset's dataspace handle. + ! + CALL h5dget_space_f(dset_id, dataspace, error) + + ! + !Get dataspace's rank. + ! + CALL h5sget_simple_extent_ndims_f(dataspace, rankr, error) + + + ! + !Get dataspace's dimensinons. + ! + CALL h5sget_simple_extent_dims_f(dataspace, dimsr, maxdimsr, error) + + + ! + !Get creation property list. + ! + CALL h5dget_create_plist_f(dset_id, cparms, error) + + ! + !Get chunk dimensions. + ! + CALL h5pget_chunk_f(cparms, 2, chunk_dimsr, error) + + ! + !create memory dataspace. + ! + CALL h5screate_simple_f(rankr, dimsr, memspace, error) + + ! + !Read data + ! + !CALL H5Dread_f(dset_id, H5T_NATIVE_INT_F, data_out, error, & + CALL H5Dread_f(dset_id, H5T_NATIVE_INTEGER, data_out, error, & + memspace, dataspace) + + ! + !Print data + ! + do i = 1, dimsr(1) + print *, (data_out(i,j), j = 1,dimsr(2)) + end do + + ! + !Close the dataspace for the dataset. + ! + CALL h5sclose_f(dataspace, error) + + ! + !Close the memoryspace. + ! + CALL h5sclose_f(memspace, error) + + ! + !Close the dataset. + ! + CALL h5dclose_f(dset_id, error) + + ! + !Close the file. + ! + CALL h5fclose_f(file_id, error) + + ! + !Close the property list. + ! + CALL h5pclose_f(cparms, error) + + ! + ! Close FORTRAN predefined datatypes. + ! + CALL h5close_types_f(error) + + END PROGRAM CHUNKEXAMPLE diff --git a/doc/html/Tutor/examples/dsetexample.f90 b/doc/html/Tutor/examples/dsetexample.f90 new file mode 100644 index 0000000..00974a9 --- /dev/null +++ b/doc/html/Tutor/examples/dsetexample.f90 @@ -0,0 +1,70 @@ +! +! The following example shows how to create an empty dataset. +! It creates a file called 'dsetf.h5', defines the +! dataset dataspace, creates a dataset which is a 4x6 integer array, +! and then closes the dataspace, the dataset, and the file. +! + + PROGRAM DSETEXAMPLE + + USE HDF5 ! This module contains all necessary modules + + IMPLICIT NONE + + CHARACTER(LEN=8), PARAMETER :: filename = "dsetf.h5" ! File name + CHARACTER(LEN=4), PARAMETER :: dsetname = "dset" ! Dataset name + + INTEGER(HID_T) :: file_id ! File identifier + INTEGER(HID_T) :: dset_id ! Dataset identifier + INTEGER(HID_T) :: dspace_id ! Dataspace identifier + + + INTEGER(HSIZE_T), DIMENSION(2) :: dims = (/4,6/) ! Dataset dimensions + INTEGER :: rank = 2 ! Dataset rank + + INTEGER :: error ! Error flag + + ! + ! Initialize FORTRAN predefined datatypes. + ! + CALL h5init_types_f(error) + + ! + ! Create a new file using default properties. + ! + CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error) + + ! + ! Create the dataspace. + ! + CALL h5screate_simple_f(rank, dims, dspace_id, error) + + ! + ! Create the dataset with default properties. + ! + CALL h5dcreate_f(file_id, dsetname, H5T_NATIVE_INTEGER, dspace_id, & + dset_id, error) + + ! + ! End access to the dataset and release resources used by it. + ! + CALL h5dclose_f(dset_id, error) + + ! + ! Terminate access to the data space. + ! + CALL h5sclose_f(dspace_id, error) + + ! + ! Close the file. + ! + CALL h5fclose_f(file_id, error) + + ! + ! Close FORTRAN predefined datatypes. + ! + CALL h5close_types_f(error) + + END PROGRAM DSETEXAMPLE + + diff --git a/doc/html/Tutor/examples/fileexample.f90 b/doc/html/Tutor/examples/fileexample.f90 new file mode 100644 index 0000000..33c3bc2 --- /dev/null +++ b/doc/html/Tutor/examples/fileexample.f90 @@ -0,0 +1,27 @@ +! +! 
The following example demonstrates how to create and close an HDF5 file. +! It creates a file called 'file.h5', and then closes the file. +! + + PROGRAM FILEEXAMPLE + + USE HDF5 ! This module contains all necessary modules + + IMPLICIT NONE + + CHARACTER(LEN=8), PARAMETER :: filename = "filef.h5" ! File name + INTEGER(HID_T) :: file_id ! File identifier + + INTEGER :: error ! Error flag + + ! + ! Create a new file using default properties. + ! + CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error) + + ! + ! Terminate access to the file. + ! + CALL h5fclose_f(file_id, error) + + END PROGRAM FILEEXAMPLE diff --git a/doc/html/Tutor/examples/groupexample.f90 b/doc/html/Tutor/examples/groupexample.f90 new file mode 100644 index 0000000..148a796 --- /dev/null +++ b/doc/html/Tutor/examples/groupexample.f90 @@ -0,0 +1,42 @@ +! +! The following example shows how to create and close a group. +! It creates a file called 'group.h5', creates a group +! called MyGroup in the root group, and then closes the group and file. +! + + + PROGRAM GROUPEXAMPLE + + USE HDF5 ! This module contains all necessary modules + + IMPLICIT NONE + + CHARACTER(LEN=9), PARAMETER :: filename = "groupf.h5" ! File name + CHARACTER(LEN=7), PARAMETER :: groupname = "MyGroup" ! Group name + + INTEGER(HID_T) :: file_id ! File identifier + INTEGER(HID_T) :: group_id ! Group identifier + + INTEGER :: error ! Error flag + + ! + ! Create a new file using default properties. + ! + CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error) + + ! + ! Create a group named "/MyGroup" in the file. + ! + CALL h5gcreate_f(file_id, groupname, group_id, error) + + ! + ! Close the group. + ! + CALL h5gclose_f(group_id, error) + + ! + ! Terminate access to the file. + ! + CALL h5fclose_f(file_id, error) + + END PROGRAM GROUPEXAMPLE diff --git a/doc/html/Tutor/examples/grpdsetexample.f90 b/doc/html/Tutor/examples/grpdsetexample.f90 new file mode 100644 index 0000000..75bc335 --- /dev/null +++ b/doc/html/Tutor/examples/grpdsetexample.f90 @@ -0,0 +1,136 @@ +! +! This example shows how to create a dataset in a particular group. +! It opens the file created in the previous example and creates two datasets. +! Absolute and relative dataset names are used. +! + + + PROGRAM GRPDSETEXAMPLE + + USE HDF5 ! This module contains all necessary modules + + IMPLICIT NONE + + CHARACTER(LEN=10), PARAMETER :: filename = "groupsf.h5" ! File name + CHARACTER(LEN=15), PARAMETER :: groupname = "MyGroup/Group_A" ! Group name + CHARACTER(LEN=13), PARAMETER :: dsetname1 = "MyGroup/dset1" ! Dataset name + CHARACTER(LEN=5), PARAMETER :: dsetname2 = "dset2" ! dataset name + + INTEGER(HID_T) :: file_id ! File identifier + INTEGER(HID_T) :: group_id ! Group identifier + INTEGER(HID_T) :: dataset_id ! Dataset identifier + INTEGER(HID_T) :: dataspace_id ! Data space identifier + + INTEGER :: i, j + INTEGER :: error ! Error flag + + INTEGER, DIMENSION(3,3) :: dset1_data ! Data arrays + INTEGER, DIMENSION(2,10) :: dset2_data ! + + INTEGER(HSIZE_T), DIMENSION(2) :: dims1 = (/3,3/) ! Datasets dimensions + INTEGER(HSIZE_T), DIMENSION(2) :: dims2 = (/2,10/)! + + INTEGER :: rank = 2 ! Datasets rank + + ! + !Initialize dset1_data array + ! + do i = 1, 3 + do j = 1, 3 + dset1_data(i,j) = j; + end do + end do + + + ! + !Initialize dset2_data array + ! + do i = 1, 2 + do j = 1, 10 + dset2_data(i,j) = j; + end do + end do + + ! + ! Initialize FORTRAN predefined datatypes. + ! + CALL h5init_types_f(error) + + ! + ! Open an existing file. + ! 
+ CALL h5fopen_f (filename, H5F_ACC_RDWR_F, file_id, error) + + ! + ! Create the data space for the first dataset. + ! + CALL h5screate_simple_f(rank, dims1, dataspace_id, error) + + ! + ! Create a dataset in group "MyGroup" with default properties. + ! + CALL h5dcreate_f(file_id, dsetname1, H5T_NATIVE_INTEGER, dataspace_id, & + dataset_id, error) + + ! + ! Write the first dataset. + ! + CALL h5dwrite_f(dataset_id, H5T_NATIVE_INTEGER, dset1_data, error) + + ! + ! Close the dataspace for the first dataset. + ! + CALL h5sclose_f(dataspace_id, error) + + ! + ! Close the first dataset. + ! + CALL h5dclose_f(dataset_id, error) + + ! + ! Open an existing group in the specified file. + ! + CALL h5gopen_f(file_id, groupname, group_id, error) + + ! + !Create the data space for the second dataset. + ! + CALL h5screate_simple_f(rank, dims2, dataspace_id, error) + + ! + ! Create the second dataset in group "Group_A" with default properties. + ! + CALL h5dcreate_f(group_id, dsetname2, H5T_NATIVE_INTEGER, dataspace_id, & + dataset_id, error) + + ! + ! Write the second dataset. + ! + CALL h5dwrite_f(dataset_id, H5T_NATIVE_INTEGER, dset2_data, error) + + ! + ! Close the dataspace for the second dataset. + ! + CALL h5sclose_f(dataspace_id, error) + + ! + ! Close the second dataset. + ! + CALL h5dclose_f(dataset_id, error) + + ! + ! Close the group. + ! + CALL h5gclose_f(group_id, error) + + ! + ! Close the file. + ! + CALL h5fclose_f(file_id, error) + + ! + ! Close FORTRAN predefined datatypes. + ! + CALL h5close_types_f(error) + + END PROGRAM GRPDSETEXAMPLE diff --git a/doc/html/Tutor/examples/grpit.f90 b/doc/html/Tutor/examples/grpit.f90 new file mode 100644 index 0000000..66fb09e --- /dev/null +++ b/doc/html/Tutor/examples/grpit.f90 @@ -0,0 +1,189 @@ +! +! In this example we iterate through the members of the groups. +! + + + PROGRAM GRPITEXAMPLE + + USE HDF5 ! This module contains all necessary modules + + IMPLICIT NONE + + CHARACTER(LEN=11), PARAMETER :: filename = "iteratef.h5" ! File name + CHARACTER(LEN=7), PARAMETER :: groupname1 = "MyGroup" ! Group name + CHARACTER(LEN=15), PARAMETER :: groupname2 = "Group_A" ! Group name + CHARACTER(LEN=13), PARAMETER :: dsetname1 = "dset1" ! Dataset name + CHARACTER(LEN=5), PARAMETER :: dsetname2 = "dset2" ! + + CHARACTER(LEN=20) :: name_buffer ! Buffer to hold object's name + INTEGER :: type ! Type of the object + INTEGER :: nmembers ! Number of group members + + INTEGER(HID_T) :: file_id ! File identifier + INTEGER(HID_T) :: dataset1_id ! Dataset1 identifier + INTEGER(HID_T) :: dataset2_id ! Dataset2 identifier + INTEGER(HID_T) :: dataspace1_id ! Data space identifier + INTEGER(HID_T) :: dataspace2_id ! Data space identifier + INTEGER(HID_T) :: group1_id, group2_id ! Group identifiers + + INTEGER :: i, j + + INTEGER :: error ! Error flag + + INTEGER, DIMENSION(3,3) :: dset1_data ! Arrays to hold data + INTEGER, DIMENSION(2,10) :: dset2_data ! + + INTEGER(HSIZE_T), DIMENSION(2) :: dims1 = (/3,3/) ! Dataset dimensions + INTEGER(HSIZE_T), DIMENSION(2) :: dims2 = (/2,10/)! + INTEGER :: rank = 2 ! Datasets rank + + ! + ! Initialize dset1_data array. + ! + do i = 1, 3 + do j = 1, 3 + dset1_data(i,j) = j; + end do + end do + + + ! + ! Initialize dset2_data array. + ! + do i = 1, 2 + do j = 1, 10 + dset2_data(i,j) = j; + end do + end do + + ! + ! Initialize FORTRAN predefined datatypes. + ! + CALL h5init_types_f(error) + + ! + ! Create a new file using default properties. + ! + CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error) + + ! + ! 
Create group "MyGroup" in the root group using absolute name. + ! + CALL h5gcreate_f(file_id, groupname1, group1_id, error) + + ! + ! Create group "Group_A" in group "MyGroup" using relative name. + ! + CALL h5gcreate_f(group1_id, groupname2, group2_id, error) + + ! + ! Create the data space for the first dataset. + ! + CALL h5screate_simple_f(rank, dims1, dataspace1_id, error) + + ! + ! Create a dataset in group "MyGroup" with default properties. + ! + CALL h5dcreate_f(group1_id, dsetname1, H5T_NATIVE_INTEGER, dataspace1_id, & + dataset1_id, error) + + ! + ! Write the first dataset. + ! + CALL h5dwrite_f(dataset1_id, H5T_NATIVE_INTEGER, dset1_data, error) + + ! + ! Create the data space for the second dataset. + ! + CALL h5screate_simple_f(rank, dims2, dataspace2_id, error) + + ! + ! Create the second dataset in group "Group_A" with default properties + ! + CALL h5dcreate_f(group2_id, dsetname2, H5T_NATIVE_INTEGER, dataspace2_id, & + dataset2_id, error) + + ! + ! Write the second dataset + ! + CALL h5dwrite_f(dataset2_id, H5T_NATIVE_INTEGER, dset2_data, error) + + ! + ! Get number of members in the root group. + ! + CALL h5gn_members_f(file_id, "/", nmembers, error) + write(*,*) "Number of root group member is " , nmembers + + ! + ! Print each group member's name and type. + ! + do i = 0, nmembers - 1 + CALL h5gget_obj_info_idx_f(file_id, "/", i, name_buffer, type, & + error) + write(*,*) name_buffer, type + end do + + ! + ! Get number of members in MyGroup. + ! + CALL h5gn_members_f(file_id, "MyGroup", nmembers, error) + write(*,*) "Number of group MyGroup member is ", nmembers + + ! + ! Print each group member's name and type in "MyGroup" group. + ! + do i = 0, nmembers - 1 + CALL h5gget_obj_info_idx_f(file_id, groupname1, i, name_buffer, type, & + error) + write(*,*) name_buffer, type + end do + + + ! + ! Get number of members in MyGroup/Group_A. + ! + CALL h5gn_members_f(file_id, "MyGroup/Group_A", nmembers, error) + write(*,*) "Number of group MyGroup/Group_A member is ", nmembers + + ! + ! Print each group member's name and type in "MyGroup/Group_A" group. + ! + do i = 0, nmembers - 1 + CALL h5gget_obj_info_idx_f(file_id,"MyGroup/Group_A" , i, name_buffer, type, & + error) + write(*,*) name_buffer, type + end do + + ! + ! Close the dataspace for the first dataset. + ! + CALL h5sclose_f(dataspace1_id, error) + + ! + ! Close the first dataset. + ! + CALL h5dclose_f(dataset1_id, error) + + ! + ! Close the dataspace for the second dataset. + ! + CALL h5sclose_f(dataspace2_id, error) + + ! + ! Close the second dataset. + ! + CALL h5dclose_f(dataset2_id, error) + + ! + ! Close the groups. + ! + CALL h5gclose_f(group1_id, error) + + CALL h5gclose_f(group2_id, error) + + ! + ! Close the file. + ! + CALL h5fclose_f(file_id, error) + + END PROGRAM GRPITEXAMPLE diff --git a/doc/html/Tutor/examples/grpsexample.f90 b/doc/html/Tutor/examples/grpsexample.f90 new file mode 100644 index 0000000..9f9c867 --- /dev/null +++ b/doc/html/Tutor/examples/grpsexample.f90 @@ -0,0 +1,58 @@ +! +! The following example code shows how to create groups +! using absolute and relative names. It creates three groups: +! the first two groups are created using the file identifier and +! the group absolute names, and the third group is created using +! a group identifier and the name relative to the specified group. +! + + PROGRAM GRPSEXAMPLE + + USE HDF5 ! This module contains all necessary modules + + IMPLICIT NONE + + CHARACTER(LEN=10), PARAMETER :: filename = "groupsf.h5" ! 
File name + CHARACTER(LEN=8), PARAMETER :: groupname1 = "/MyGroup" ! Group name + CHARACTER(LEN=16), PARAMETER :: groupname2 = "/MyGroup/Group_A" + ! Group name + CHARACTER(LEN=7), PARAMETER :: groupname3 = "Group_B" ! Group name + + INTEGER(HID_T) :: file_id ! File identifier + INTEGER(HID_T) :: group1_id, group2_id, group3_id ! Group identifiers + + INTEGER :: error ! Error flag + + ! + ! Create a new file using default properties. + ! + CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error) + + ! + ! Create group "MyGroup" in the root group using absolute name. + ! + CALL h5gcreate_f(file_id, groupname1, group1_id, error) + + ! + ! Create group "Group_A" in group "MyGroup" using absolute name. + ! + CALL h5gcreate_f(file_id, groupname2, group2_id, error) + + ! + ! Create group "Group_B" in group "MyGroup" using relative name. + ! + CALL h5gcreate_f(group1_id, groupname3, group3_id, error) + + ! + ! Close the groups. + ! + CALL h5gclose_f(group1_id, error) + CALL h5gclose_f(group2_id, error) + CALL h5gclose_f(group3_id, error) + + ! + ! Terminate access to the file. + ! + CALL h5fclose_f(file_id, error) + + END PROGRAM GRPSEXAMPLE diff --git a/doc/html/Tutor/examples/hyperslab.f90 b/doc/html/Tutor/examples/hyperslab.f90 new file mode 100644 index 0000000..0be4b60 --- /dev/null +++ b/doc/html/Tutor/examples/hyperslab.f90 @@ -0,0 +1,199 @@ +! +! This example shows how to write and read a hyperslab. +! + + PROGRAM SELECTEXAMPLE + + USE HDF5 ! This module contains all necessary modules + + IMPLICIT NONE + + CHARACTER(LEN=7), PARAMETER :: filename = "sdsf.h5" ! File name + CHARACTER(LEN=8), PARAMETER :: dsetname = "IntArray" ! Dataset name + + INTEGER(HID_T) :: file_id ! File identifier + INTEGER(HID_T) :: dset_id ! Dataset identifier + INTEGER(HID_T) :: dataspace ! Dataspace identifier + INTEGER(HID_T) :: memspace ! memspace identifier + + INTEGER(HSIZE_T), DIMENSION(3) :: dimsm = (/7,7,3/) ! Dataset dimensions + ! in memory + INTEGER(HSIZE_T), DIMENSION(2) :: dims_out ! Buffer to read in dataset + ! dimesions + INTEGER(HSIZE_T), DIMENSION(2) :: dimsf = (/5,6/) ! Dataset dimensions. + + INTEGER(HSIZE_T), DIMENSION(2) :: count = (/3,4/) + ! Size of the hyperslab in the file + INTEGER(HSIZE_T), DIMENSION(2) :: offset = (/1,2/) + !hyperslab offset in the file + INTEGER(HSIZE_T), DIMENSION(3) :: count_out = (/3,4,1/) + !Size of the hyperslab in memory + INTEGER(HSIZE_T), DIMENSION(3) :: offset_out = (/3,0,0/) + !hyperslab offset in memory + INTEGER, DIMENSION(5,6) :: data ! Data to write + INTEGER, DIMENSION(7,7,3) :: data_out ! Output buffer + INTEGER :: dsetrank = 2 ! Dataset rank ( in file ) + INTEGER :: memrank = 3 ! Dataset rank ( in memory ) + INTEGER :: rank + INTEGER :: i, j, k + + INTEGER :: error, error_n ! Error flags + + + ! + ! Write data to the HDF5 file. + ! + + ! + ! Data initialization. + ! + do i = 1, 5 + do j = 1, 6 + data(i,j) = (i-1) + (j-1); + end do + end do + ! + ! 0, 1, 2, 3, 4, 5 + ! 1, 2, 3, 4, 5, 6 + ! 2, 3, 4, 5, 6, 7 + ! 3, 4, 5, 6, 7, 8 + ! 4, 5, 6, 7, 8, 9 + ! + + ! + ! Initialize FORTRAN predefined datatypes + ! + CALL h5init_types_f(error) + + ! + ! Create a new file using default properties. + ! + CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error) + + ! + ! Create the data space for the dataset. + ! + CALL h5screate_simple_f(dsetrank, dimsf, dataspace, error) + + ! + ! Create the dataset with default properties. + ! + CALL h5dcreate_f(file_id, dsetname, H5T_NATIVE_INTEGER, dataspace, & + dset_id, error) + + ! + ! Write the dataset. + ! 
+ CALL h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, data, error) + + ! + ! Close the dataspace for the dataset. + ! + CALL h5sclose_f(dataspace, error) + + ! + ! Close the dataset. + ! + CALL h5dclose_f(dset_id, error) + + ! + ! Close the file. + ! + CALL h5fclose_f(file_id, error) + + ! + ! This part of the code reads the hyperslab from the sds.h5 file just + ! created, into a 2-dimensional plane of the 3-dimensional dataset. + ! + + ! + ! Initialize data_out array. + ! + do i = 1, 7 + do j = 1, 7 + do k = 1,3 + data_out(i,j,k) = 0; + end do + end do + end do + + ! + ! Open the file. + ! + CALL h5fopen_f (filename, H5F_ACC_RDONLY_F, file_id, error) + + ! + ! Open the dataset. + ! + CALL h5dopen_f(file_id, dsetname, dset_id, error) + + ! + ! Get dataset's dataspace identifier. + ! + CALL h5dget_space_f(dset_id, dataspace, error) + + ! + ! Select hyperslab in the dataset. + ! + CALL h5sselect_hyperslab_f(dataspace, H5S_SELECT_SET_F, & + offset, count, error) + ! + ! Create memory dataspace. + ! + CALL h5screate_simple_f(memrank, dimsm, memspace, error) + + ! + ! Select hyperslab in memory. + ! + CALL h5sselect_hyperslab_f(memspace, H5S_SELECT_SET_F, & + offset_out, count_out, error) + + ! + ! Read data from hyperslab in the file into the hyperslab in + ! memory and display. + ! + CALL H5Dread_f(dset_id, H5T_NATIVE_INTEGER, data_out, error, & + memspace, dataspace) + + ! + ! Display data_out array + ! + do i = 1, 7 + print *, (data_out(i,j,1), j = 1,7) + end do + + ! 0 0 0 0 0 0 0 + ! 0 0 0 0 0 0 0 + ! 0 0 0 0 0 0 0 + ! 3 4 5 6 0 0 0 + ! 4 5 6 7 0 0 0 + ! 5 6 7 8 0 0 0 + ! 0 0 0 0 0 0 0 + ! + + ! + ! Close the dataspace for the dataset. + ! + CALL h5sclose_f(dataspace, error) + + ! + ! Close the memoryspace. + ! + CALL h5sclose_f(memspace, error) + + ! + ! Close the dataset. + ! + CALL h5dclose_f(dset_id, error) + + ! + ! Close the file. + ! + CALL h5fclose_f(file_id, error) + + ! + ! Close FORTRAN predefined datatypes. + ! 
+ CALL h5close_types_f(error) + + END PROGRAM SELECTEXAMPLE diff --git a/doc/html/Tutor/examples/java/Compound.java b/doc/html/Tutor/examples/java/Compound.java new file mode 100644 index 0000000..219e1c1 --- /dev/null +++ b/doc/html/Tutor/examples/java/Compound.java @@ -0,0 +1,540 @@ +/****************************************************************** + * Compound.java (for HDF5 tutorial lesson 11) + * + * -- Creating a compound data type + * (a java conversion from compound.c) + * + ******************************************************************/ + +import ncsa.hdf.hdf5lib.*; +import ncsa.hdf.hdf5lib.exceptions.*; + +public class Compound +{ + public static void main (String []argv) + { + final String FILE = "SDScompound.h5"; + final String DATASETNAME = "ArrayOfStructures"; + final int LENGTH = 10; + final int RANK = 1; + + /* First structure and dataset */ + /* an array of LENGTH 'complex' numbers */ + byte[] data1 = new byte[LENGTH * 16]; + + int[] AR = new int[1]; + float[] BR = new float[1]; + double[] CR = new double[1]; + + byte [] ARec = new byte[4]; + byte [] BRec = new byte[4]; + byte [] CRec = new byte[8]; + + int s1_tid; /* File datatype identifier */ + + /* Second structure (subset of s1_t) and dataset*/ + byte[] data2 = new byte[LENGTH * 12]; + int s2_tid; /* Memory datatype handle */ + + /* Third "structure" ( will be used to read float field of s1) */ + int s3_tid; /* Memory datatype handle */ + float[] s3 = new float[LENGTH]; + + int i; + int file, dataset, space; /* Handles */ + int status; + long[] dim = new long[1]; /* Dataspace dimensions */ + dim[0] = LENGTH; + + /* + * Initialize the data + */ + for (i = 0; i < LENGTH; i++) + { + AR[0] = (int) i; + BR[0] = (float) i * i; + CR[0] = (double) 1. / (i + 1); + + ARec = HDFNativeData.intToByte (0, 1, AR); + BRec = HDFNativeData.floatToByte (0, 1, BR); + CRec = HDFNativeData.doubleToByte (0, 1, CR); + + System.arraycopy (ARec, 0, data1, (i * 16), 4); + System.arraycopy (BRec, 0, data1, (i * 16) + 4, 4); + System.arraycopy (CRec, 0, data1, (i * 16) + 8, 8); + } + + /* + * Create the data space. + */ + space = H5Screate_simple_wrap (RANK, dim, null); + + /* + * Create the file. + */ + file = H5Fcreate_wrap (FILE, HDF5Constants.H5F_ACC_TRUNC, + HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + + /* + * Create the memory data type. + */ + s1_tid = H5Tcreate_wrap (HDF5Constants.H5T_COMPOUND, 16); + H5Tinsert_wrap (s1_tid, "a_name", 0, + H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT)); + H5Tinsert_wrap (s1_tid, "b_name", 4, + H5.J2C (HDF5CDataTypes.JH5T_NATIVE_FLOAT)); + H5Tinsert_wrap (s1_tid, "c_name", 8, + H5.J2C (HDF5CDataTypes.JH5T_NATIVE_DOUBLE)); + + /* + * Create the dataset. + */ + dataset = H5Dcreate_wrap (file, DATASETNAME, s1_tid, + space, HDF5Constants.H5P_DEFAULT); + + /* + * Wtite data to the dataset; + */ + status = H5Dwrite_wrap (dataset, s1_tid, + HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, data1); + + /* + * Release resources + */ + H5Tclose_wrap (s1_tid); + H5Sclose_wrap (space); + H5Dclose_wrap (dataset); + H5Fclose_wrap (file); + + /* + * Open the file and the dataset. 
+ */ + file = H5Fopen_wrap (FILE, HDF5Constants.H5F_ACC_RDONLY, + HDF5Constants.H5P_DEFAULT); + + dataset = H5Dopen_wrap (file, DATASETNAME); + + /* + * Create a data type for s2 + */ + s2_tid = H5Tcreate_wrap (HDF5Constants.H5T_COMPOUND, 12); + H5Tinsert_wrap (s2_tid, "c_name", 0, + H5.J2C (HDF5CDataTypes.JH5T_NATIVE_DOUBLE)); + H5Tinsert_wrap (s2_tid, "a_name", 8, + H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT)); + + /* + * Read two fields c and a from s1 dataset. Fields in the file + * are found by their names "c_name" and "a_name". + */ + status = H5Dread_wrap (dataset, s2_tid, HDF5Constants.H5S_ALL, + HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, data2); + + /* + * Display the fields. Convert from bytes into numbers. + */ + System.out.println ("\nField c : "); + for( i = 0; i < LENGTH; i++) { + System.arraycopy (data2, (i*12), CRec, 0, 8); + CR = HDFNativeData.byteToDouble(0, 1, CRec); + System.out.print (CR[0]+" "); + } + System.out.println (); + + System.out.println("\nField a :"); + for( i = 0; i < LENGTH; i++) { + System.arraycopy (data2, (i*12)+8, ARec, 0, 4); + AR = HDFNativeData.byteToInt(0, 1, ARec); + System.out.print (AR[0]+" "); + } + System.out.println (); + + /* + * Create a data type for s3. + */ + s3_tid = H5Tcreate_wrap (HDF5Constants.H5T_COMPOUND, 4); + + status = + H5Tinsert_wrap (s3_tid, "b_name", 0, + H5.J2C (HDF5CDataTypes.JH5T_NATIVE_FLOAT)); + + /* + * Read field b from s1 dataset. Field in the file is found by its name. + */ + status = H5Dread_wrap (dataset, s3_tid, HDF5Constants.H5S_ALL, + HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, s3); + + /* + * Display the field. Data is read directly into array of 'float'. + */ + System.out.println (); + System.out.println ("Field b :"); + for( i = 0; i < LENGTH; i++) { + System.out.print (s3[i]+" "); + } + System.out.println (); + + /* + * Release resources + */ + H5Tclose_wrap (s2_tid); + H5Tclose_wrap (s3_tid); + H5Dclose_wrap (dataset); + H5Fclose_wrap (file); + } + + + // Help function for creating a new file + public static int H5Fcreate_wrap (String name, int flags, + int create_id, int access_id) + { + int file_id = -1; // file identifier + try + { + // Create a new file using default file properties. + file_id = H5.H5Fcreate (name, flags, create_id, access_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Compound.H5Fcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Compound.H5Fcreate_wrap() with other Exception: " + + e.getMessage()); + } + return file_id; + } + + + // Help function for adding another member to the compound + // datatype datatype_id. + public static int H5Tinsert_wrap (int type_id, String name, + long offset, int field_id) + { + int status = -1; + try + { + // Adding another member to the compound datatype datatype_id. + status = H5.H5Tinsert (type_id, name, offset, field_id); + + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Compound.H5Tinsert_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Compound.H5Tinsert_wrap() with HDF5Exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for creating the memory data type. + public static int H5Tcreate_wrap (int dclass, int size) + { + int datatype_id = -1; // memory data type identifier + try + { + // Create the memory data type. 
+ datatype_id = H5.H5Tcreate (dclass, size); + + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Compound.H5Tcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Compound.H5Tcreate_wrap() with other Exception: " + + e.getMessage()); + } + return datatype_id; + } + + + // Help function for opening an existing file + public static int H5Fopen_wrap (String name, int flags, int access_id) + { + int file_id = -1; // file identifier + try + { + // Create a new file using default file properties. + file_id = H5.H5Fopen (name, flags, access_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Compound.H5Fopen_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Compound.H5Fopen_wrap() with other Exception: " + + e.getMessage()); + } + return file_id; + } + + + // Help function for opening an existing dataset + public static int H5Dopen_wrap (int loc_id, String name) + { + int dataset_id = -1; // dataset identifier + + try + { + // Opening an existing dataset + dataset_id = H5.H5Dopen (loc_id, name); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Compound.H5Dopen_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Compound.H5Dopen_wrap() with other Exception: " + + e.getMessage()); + } + return dataset_id; + } + + + // Help function for creating a new simple dataspace and opening it + // for access + public static int H5Screate_simple_wrap (int rank, long dims[], + long maxdims[]) + { + int dataspace_id = -1; // dataspace identifier + + try + { + // Create the data space for the dataset. + dataspace_id = H5.H5Screate_simple (rank, dims, maxdims); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Compound.H5Screate_simple_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Compound.H5Screate_simple_wrap() with other Exception: " + + e.getMessage()); + } + return dataspace_id; + } + + + // Help function for creating a dataset + public static int H5Dcreate_wrap (int loc_id, String name, int type_id, + int space_id, int create_plist_id) + { + int dataset_id = -1; // dataset identifier + + try + { + // Create the dataset + dataset_id = H5.H5Dcreate (loc_id, name, type_id, space_id, + create_plist_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Compound.H5Dcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Compound.H5Dcreate_wrap() with other Exception: " + + e.getMessage()); + } + return dataset_id; + } + + + // Help function for writing the dataset + public static int H5Dwrite_wrap (int dataset_id, int mem_type_id, + int mem_space_id, int file_space_id, + int xfer_plist_id, Object buf) + { + int status = -1; + + try + { + // Write the dataset. 
+ status = H5.H5Dwrite (dataset_id, mem_type_id, mem_space_id, + file_space_id, xfer_plist_id, buf); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Compound.H5Dwrite_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Compound.H5Dwrite_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for reading the dataset + public static int H5Dread_wrap (int dataset_id, int mem_type_id, + int mem_space_id, int file_space_id, + int xfer_plist_id, Object obj) + { + int status = -1; + + try + { + // Read the dataset. + status = H5.H5Dread (dataset_id, mem_type_id, mem_space_id, + file_space_id, xfer_plist_id, obj); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Compound.H5Dread_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Compound.H5Dread_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + + // Help function for terminating access to the data space. + public static int H5Sclose_wrap (int dataspace_id) + { + int status = -1; + + try + { + // Terminate access to the data space. + status = H5.H5Sclose (dataspace_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Compound.H5Sclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Compound.H5Sclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for releasing a datatype. + public static int H5Tclose_wrap (int type_id) + { + int status = -1; + + try + { + // Releasing a datatype. + status = H5.H5Tclose (type_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Compound.H5Tclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Compound.H5Tclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for ending access to the dataset and releasing + // resources used by it. + public static int H5Dclose_wrap (int dataset_id) + { + int status = -1; + + try + { + // End access to the dataset and release resources used by it. + status = H5.H5Dclose (dataset_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Compound.H5Dclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Compound.H5Dclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for terminating access to the file. + public static int H5Fclose_wrap (int file_id) + { + int status = -1; + + try + { + // Terminate access to the file. + status = H5.H5Fclose (file_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Compound.H5Fclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Compound.H5Fclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } +} diff --git a/doc/html/Tutor/examples/java/Copy.java b/doc/html/Tutor/examples/java/Copy.java new file mode 100644 index 0000000..f174210 --- /dev/null +++ b/doc/html/Tutor/examples/java/Copy.java @@ -0,0 +1,541 @@ +/****************************************************************** + * Copy.java (for HDF5 tutorial lesson 13) + * + * -- Showing how to use the H5SCOPY function. 
+ * (a java conversion from h5_copy.c) + * + ******************************************************************/ + +import ncsa.hdf.hdf5lib.*; +import ncsa.hdf.hdf5lib.exceptions.*; + +public class Copy +{ + public static void main (String []argv) + { + final String FILE1 = "copy1.h5"; + final String FILE2 = "copy2.h5"; + + final int RANK = 2; + final int DIM1 = 3; + final int DIM2 = 4; + final int NUMP = 2; + + int file1, file2, dataset1, dataset2; + int mid1, mid2, fid1, fid2; + long[] fdim = new long[2]; + fdim[0] = DIM1; + fdim[1] = DIM2; + long[] mdim = new long[2]; + fdim[0] = DIM1; + fdim[1] = DIM2; + + long[] start = new long[2]; + long[] stride = new long[2]; + long[] count = new long[2]; + long[] block = new long[2]; + + int[][] buf1 = new int[DIM1][DIM2]; + int[][] buf2 = new int[DIM1][DIM2]; + int[][] bufnew = new int[DIM1][DIM2]; + + int[] val = new int[2]; + val[0] = 53; + val[1] = 59; + + long[] marray = {2}; + long[][] coord = new long[NUMP][RANK]; + int ret; + int i, j; + + +/***********************************************************************/ +/* */ +/* Create two files containing identical datasets. Write 0's to one */ +/* and 1's to the other. */ +/* */ +/***********************************************************************/ + + for ( i = 0; i < DIM1; i++ ) + for ( j = 0; j < DIM2; j++ ) + buf1[i][j] = 0; + + for ( i = 0; i < DIM1; i++ ) + for ( j = 0; j < DIM2; j++ ) + buf2[i][j] = 1; + + file1 = H5Fcreate_wrap (FILE1, HDF5Constants.H5F_ACC_TRUNC, + HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + file2 = H5Fcreate_wrap (FILE2, HDF5Constants.H5F_ACC_TRUNC, + HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + + fid1 = H5Screate_simple_wrap (RANK, fdim, null); + fid2 = H5Screate_simple_wrap (RANK, fdim, null); + + dataset1 = H5Dcreate_wrap + (file1, "Copy1", H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT), fid1, + HDF5Constants.H5P_DEFAULT); + + dataset2 = H5Dcreate_wrap + (file2, "Copy2", H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT), fid2, + HDF5Constants.H5P_DEFAULT); + + + ret = H5Dwrite_wrap (dataset1, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT), + HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, buf1); + + ret = H5Dwrite_wrap (dataset2, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT), + HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, buf2); + + ret = H5Dclose_wrap (dataset1); + ret = H5Dclose_wrap (dataset2); + + ret = H5Sclose_wrap (fid1); + ret = H5Sclose_wrap (fid2); + + ret = H5Fclose_wrap (file1); + ret = H5Fclose_wrap (file2); + + +/***********************************************************************/ +/* */ +/* Open the two files. Select two points in one file, write values to */ +/* those point locations, then do H5Scopy and write the values to the */ +/* other file. Close files. 
*/ +/* */ +/***********************************************************************/ + + file1 = H5Fopen_wrap (FILE1, HDF5Constants.H5F_ACC_RDWR, + HDF5Constants.H5P_DEFAULT); + + file2 = H5Fopen_wrap (FILE2, HDF5Constants.H5F_ACC_RDWR, + HDF5Constants.H5P_DEFAULT); + + dataset1 = H5Dopen_wrap (file1, "Copy1"); + dataset2 = H5Dopen_wrap (file2, "Copy2"); + + fid1 = H5Dget_space_wrap (dataset1); + mid1 = H5Screate_simple_wrap (1, marray, null); + + coord[0][0] = 0; coord[0][1] = 3; + coord[1][0] = 0; coord[1][1] = 1; + + ret = H5Sselect_elements_wrap (fid1, HDF5Constants.H5S_SELECT_SET, + NUMP, coord); + + ret = H5Dwrite_wrap (dataset1, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT), + mid1, fid1, HDF5Constants.H5P_DEFAULT, val); + + fid2 = H5Scopy_wrap (fid1); + + ret = H5Dwrite_wrap (dataset2, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT), + mid1, fid2, HDF5Constants.H5P_DEFAULT, val); + + ret = H5Dclose_wrap (dataset1); + ret = H5Dclose_wrap (dataset2); + ret = H5Sclose_wrap (fid1); + ret = H5Sclose_wrap (fid2); + ret = H5Fclose_wrap (file1); + ret = H5Fclose_wrap (file2); + ret = H5Sclose_wrap (mid1); + + +/***********************************************************************/ +/* */ +/* Open both files and print the contents of the datasets. */ +/* */ +/***********************************************************************/ + + file1 = H5Fopen_wrap (FILE1, HDF5Constants.H5F_ACC_RDWR, + HDF5Constants.H5P_DEFAULT); + file2 = H5Fopen_wrap (FILE2, HDF5Constants.H5F_ACC_RDWR, + HDF5Constants.H5P_DEFAULT); + dataset1 = H5Dopen_wrap (file1, "Copy1"); + dataset2 = H5Dopen_wrap (file2, "Copy2"); + + ret = H5Dread_wrap (dataset1, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT), + HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, bufnew); + + System.out.println ("\nDataset 'Copy1' in file 'copy1.h5' contains: "); + + for (i = 0;i < DIM1; i++) + { + for (j = 0;j < DIM2; j++) + System.out.print (bufnew[i][j]); + System.out.println (); + } + + System.out.println ("\nDataset 'Copy2' in file 'copy2.h5' contains: "); + + ret = H5Dread_wrap (dataset2, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT), + HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, bufnew); + + for (i = 0;i < DIM1; i++) + { + for (j = 0;j < DIM2; j++) + System.out.print (bufnew[i][j]); + System.out.println (); + } + + ret = H5Dclose_wrap (dataset1); + ret = H5Dclose_wrap (dataset2); + ret = H5Fclose_wrap (file1); + ret = H5Fclose_wrap (file2); + } + + + // Help function for creating a new file + public static int H5Fcreate_wrap (String name, int flags, + int create_id, int access_id) + { + int file_id = -1; // file identifier + try + { + // Create a new file using default file properties. + file_id = H5.H5Fcreate (name, flags, create_id, access_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Copy.H5Fcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Copy.H5Fcreate_wrap() with other Exception: " + + e.getMessage()); + } + return file_id; + } + + + // Help function for opening an existing file + public static int H5Fopen_wrap (String name, int flags, int access_id) + { + int file_id = -1; // file identifier + try + { + // Create a new file using default file properties. 
+ file_id = H5.H5Fopen (name, flags, access_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Copy.H5Fopen_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Copy.H5Fopen_wrap() with other Exception: " + + e.getMessage()); + } + return file_id; + } + + + // Help function for opening an existing dataset + public static int H5Dopen_wrap (int loc_id, String name) + { + int dataset_id = -1; // dataset identifier + + try + { + // Opening an existing dataset + dataset_id = H5.H5Dopen (loc_id, name); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Copy.H5Dopen_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Copy.H5Dopen_wrap() with other Exception: " + + e.getMessage()); + } + return dataset_id; + } + + + // Help function for creating a new simple dataspace and opening it + // for access + public static int H5Screate_simple_wrap (int rank, long dims[], + long maxdims[]) + { + int dataspace_id = -1; // dataspace identifier + + try + { + // Create the data space for the dataset. + dataspace_id = H5.H5Screate_simple (rank, dims, maxdims); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Copy.H5Screate_simple_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Copy.H5Screate_simple_wrap() with other Exception: " + + e.getMessage()); + } + return dataspace_id; + } + + + // Help function for getting an identifier for a copy of + // the dataspace for a dataset + public static int H5Dget_space_wrap (int dataset_id) + { + int dataspace_id = -1; + + try + { + // Returning an identifier for a copy of the dataspace for a dataset + dataspace_id = H5.H5Dget_space (dataset_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Copy.H5Dget_space_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Copy.H5Dget_space_wrap() with other Exception: " + + e.getMessage()); + } + return dataspace_id; + } + + + // Help function for selecting array elements to be included in + // the selection for the space_id dataspace. + public static int H5Sselect_elements_wrap (int space_id, int op, + int num_elements, + long coord2D[][]) + { + int status = -1; + + try + { + status = H5.H5Sselect_elements (space_id, op, num_elements, + coord2D); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Copy.H5Sselect_elements_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Copy.H5Sselect_elements_wrap() with other Exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for creating a new dataspace which is an exact + // copy of the dataspace identified by space_id. 
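+   // (in this example the copied dataspace carries the same two-point
+   // selection over to the dataset in the second file)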
+ public static int H5Scopy_wrap (int space_id) + { + int dataspace_id = -1; + + try + { + dataspace_id = H5.H5Scopy(space_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println ("Copy.H5Scopy_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println ("Copy.H5Scopy_wrap() with other Exception: " + + e.getMessage()); + } + return dataspace_id; + } + + + // Help function for creating a dataset + public static int H5Dcreate_wrap (int loc_id, String name, int type_id, + int space_id, int create_plist_id) + { + int dataset_id = -1; // dataset identifier + + try + { + // Create the dataset + dataset_id = H5.H5Dcreate (loc_id, name, type_id, space_id, + create_plist_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Copy.H5Dcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Copy.H5Dcreate_wrap() with other Exception: " + + e.getMessage()); + } + return dataset_id; + } + + + // Help function for writing the dataset + public static int H5Dwrite_wrap (int dataset_id, int mem_type_id, + int mem_space_id, int file_space_id, + int xfer_plist_id, Object buf) + { + int status = -1; + + try + { + // Write the dataset. + status = H5.H5Dwrite (dataset_id, mem_type_id, mem_space_id, + file_space_id, xfer_plist_id, buf); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Copy.H5Dwrite_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Copy.H5Dwrite_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for reading the dataset + public static int H5Dread_wrap (int dataset_id, int mem_type_id, + int mem_space_id, int file_space_id, + int xfer_plist_id, Object obj) + { + int status = -1; + + try + { + // Read the dataset. + status = H5.H5Dread (dataset_id, mem_type_id, mem_space_id, + file_space_id, xfer_plist_id, obj); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Copy.H5Dread_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Copy.H5Dread_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for terminating access to the data space. + public static int H5Sclose_wrap (int dataspace_id) + { + int status = -1; + + try + { + // Terminate access to the data space. + status = H5.H5Sclose (dataspace_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Copy.H5Sclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Copy.H5Sclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for ending access to the dataset and releasing + // resources used by it. + public static int H5Dclose_wrap (int dataset_id) + { + int status = -1; + + try + { + // End access to the dataset and release resources used by it. + status = H5.H5Dclose (dataset_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("Copy.H5Dclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("Copy.H5Dclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for terminating access to the file. + public static int H5Fclose_wrap (int file_id) + { + int status = -1; + + try + { + // Terminate access to the file. 
+         status = H5.H5Fclose (file_id);
+      }
+      catch (HDF5Exception hdf5e)
+      {
+         System.out.println
+            ("Copy.H5Fclose_wrap() with HDF5Exception: " +
+             hdf5e.getMessage());
+      }
+      catch (Exception e)
+      {
+         System.out.println
+            ("Copy.H5Fclose_wrap() with other exception: " +
+             e.getMessage());
+      }
+      return status;
+   }
+}
diff --git a/doc/html/Tutor/examples/java/CreateAttribute.java b/doc/html/Tutor/examples/java/CreateAttribute.java
new file mode 100644
index 0000000..c926422
--- /dev/null
+++ b/doc/html/Tutor/examples/java/CreateAttribute.java
@@ -0,0 +1,302 @@
+/******************************************************************
+ * CreateAttribute.java (for HDF5 tutorial lesson 7)
+ *
+ *   -- Creating and Writing a dataset attribute
+ *      (a java conversion from h5_crtatt.c)
+ *
+ ******************************************************************/
+
+import ncsa.hdf.hdf5lib.*;
+import ncsa.hdf.hdf5lib.exceptions.*;
+
+public class CreateAttribute
+{
+   public static void main(String []argv)
+   {
+      final String FILE = "dset.h5";
+      int file_id = -1;       // file identifier
+      int dataset_id = -1;    // dataset identifier
+      int attribute_id = -1;
+      int dataspace_id = -1;  // dataspace identifier
+      long[] dims = new long[1];
+      int[] attr_data = new int[2];
+      int status = -1;
+
+      // Initialize the attribute data.
+      attr_data[0] = 100;
+      attr_data[1] = 200;
+
+      // Open an existing file.
+      file_id = H5Fopen_wrap (FILE, HDF5Constants.H5F_ACC_RDWR,
+                              HDF5Constants.H5P_DEFAULT);
+
+      // Open an existing dataset.
+      dataset_id = H5Dopen_wrap (file_id, "/dset");
+
+      // Create the data space for the attribute.
+      dims[0] = 2;
+      dataspace_id = H5Screate_simple_wrap (1, dims, null);
+
+      // Create a dataset attribute.
+      attribute_id = H5Acreate_wrap
+         (dataset_id, "attr",
+          H5.J2C (HDF5CDataTypes.JH5T_STD_I32BE),
+          dataspace_id, HDF5Constants.H5P_DEFAULT);
+
+      // Write the attribute data.
+      status = H5Awrite_wrap
+         (attribute_id,
+          H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+          attr_data);
+
+      // Close the attribute.
+      status = H5Aclose_wrap (attribute_id);
+
+      // Close the dataspace.
+      status = H5Sclose_wrap (dataspace_id);
+
+      // Close the dataset.
+      status = H5Dclose_wrap (dataset_id);
+
+      // Close the file.
+      status = H5Fclose_wrap (file_id);
+   }
+
+
+   // Help function for opening an existing file
+   public static int H5Fopen_wrap (String name, int flags, int access_id)
+   {
+      int file_id = -1;    // file identifier
+      try
+      {
+         // Open an existing file using default file access properties.
+         file_id = H5.H5Fopen (name, flags, access_id);
+      }
+      catch (HDF5Exception hdf5e)
+      {
+         System.out.println
+            ("CreateAttribute.H5Fopen_wrap() with HDF5Exception: " +
+             hdf5e.getMessage());
+      }
+      catch (Exception e)
+      {
+         System.out.println
+            ("CreateAttribute.H5Fopen_wrap() with other Exception: " +
+             e.getMessage());
+      }
+      return file_id;
+   }
+
+
+   // Help function for opening an existing dataset
+   public static int H5Dopen_wrap (int loc_id, String name)
+   {
+      int dataset_id = -1;    // dataset identifier
+
+      try
+      {
+         // Opening an existing dataset
+         dataset_id = H5.H5Dopen (loc_id, name);
+      }
+      catch (HDF5Exception hdf5e)
+      {
+         System.out.println
+            ("CreateAttribute.H5Dopen_wrap() with HDF5Exception: " +
+             hdf5e.getMessage());
+      }
+      catch (Exception e)
+      {
+         System.out.println
+            ("CreateAttribute.H5Dopen_wrap() with other Exception: " +
+             e.getMessage());
+      }
+      return dataset_id;
+   }
+
+
+   // Create the data space for the attribute.
+ public static int H5Screate_simple_wrap (int rank, long dims[], + long maxdims[]) + { + int dataspace_id = -1; // dataspace identifier + + try + { + // Create the data space for the dataset. + dataspace_id = H5.H5Screate_simple (rank, dims, maxdims); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateAttribute.H5Screate_simple_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + + } + catch (Exception e) + { + System.out.println + ("CreateAttribute.H5Screate_simple_wrap() with other Exception: " + + e.getMessage()); + } + return dataspace_id; + } + + + // Help function for creating a dataset attribute. + public static int H5Acreate_wrap (int loc_id, String name, int type_id, + int space_id, int create_plist) + { + int attribute_id = -1; // attribute identifier + + try + { + // Create the dataset + attribute_id = H5.H5Acreate (loc_id, name, type_id, space_id, + create_plist); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateAttribute.H5Acreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateAttribute.H5Acreate_wrap() with other Exception: " + + e.getMessage()); + } + return attribute_id; + } + + + // Help function for writing the attribute data. + public static int H5Awrite_wrap (int attr_id, int mem_type_id, + Object buf) + { + int status = -1; + + try + { + // Write the attribute data. + status = H5.H5Awrite (attr_id, mem_type_id, buf); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateAttribute.H5Awrite_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateAttribute.H5Awrite_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for closing the attribute + public static int H5Aclose_wrap (int attribute_id) + { + int status = -1; + + try + { + // Close the dataset + status = H5.H5Aclose (attribute_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateAttribute.H5Aclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateAttribute.H5Aclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for closing the dataset + public static int H5Dclose_wrap (int dataset_id) + { + int status = -1; + + try + { + // Close the dataset + status = H5.H5Dclose (dataset_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateAttribute.H5Dclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateAttribute.H5Dclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for closing the dataspace + public static int H5Sclose_wrap (int dataspace_id) + { + int status = -1; + + try + { + // Terminate access to the data space. + status = H5.H5Sclose (dataspace_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateAttribute.H5Sclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateAttribute.H5Sclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for terminating access to the file. + public static int H5Fclose_wrap (int file_id) + { + int status = -1; + + try + { + // Terminate access to the file. 
+ status = H5.H5Fclose (file_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateAttribute.H5Fclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateAttribute.H5Fclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } +} diff --git a/doc/html/Tutor/examples/java/CreateDataset.java b/doc/html/Tutor/examples/java/CreateDataset.java new file mode 100644 index 0000000..05f3f6b --- /dev/null +++ b/doc/html/Tutor/examples/java/CreateDataset.java @@ -0,0 +1,210 @@ +/****************************************************************** + * CreateDataset.java (for HDF5 tutorial lesson 5) + * + * -- Creating a HDF5 Dataset + * (a java conversion from h5_crtdat.c) + * + ******************************************************************/ + +import ncsa.hdf.hdf5lib.*; +import ncsa.hdf.hdf5lib.exceptions.*; + +public class CreateDataset +{ + public static void main(String []argv) + { + final String FILE = "dset.h5"; + int file_id = -1; // file identifier + int dataset_id = -1; // dataset identifier + int dataspace_id = -1; // dataspace identifier + long[] dims = new long[2]; + int status = -1; + + // Create a new file using default properties. + file_id = H5Fcreate_wrap (FILE, HDF5Constants.H5F_ACC_TRUNC, + HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + + // Create the data space for the dataset. + dims[0] = 4; + dims[1] = 6; + dataspace_id = H5Screate_simple_wrap (2, dims, null); + + // Create the dataset. + dataset_id = + H5Dcreate_wrap (file_id, "/dset", + H5.J2C (HDF5CDataTypes.JH5T_STD_I32BE), + dataspace_id, HDF5Constants.H5P_DEFAULT); + + // End access to the dataset and release resources used by it. + status = H5Dclose_wrap (dataset_id); + + // Terminate access to the data space. + status = H5Sclose_wrap (dataspace_id); + + // Close the file. + status = H5Fclose_wrap (file_id); + } + + + // Help function for creating a new file + public static int H5Fcreate_wrap (String name, int flags, + int create_id, int access_id) + { + int file_id = -1; // file identifier + try + { + // Create a new file using default file properties. + file_id = H5.H5Fcreate (name, flags, create_id, access_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateDataset.H5Fcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateDataset.H5Fcreate_wrap() with other Exception: " + + e.getMessage()); + } + return file_id; + } + + + // Help function for creating a new simple dataspace and opening it + // for access + public static int H5Screate_simple_wrap (int rank, long dims[], + long maxdims[]) + { + int dataspace_id = -1; // dataspace identifier + + try + { + // Create the data space for the dataset. 
+ dataspace_id = H5.H5Screate_simple (rank, dims, maxdims); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateDataset.H5Screate_simple_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateDataset.H5Screate_simple_wrap() with other Exception: " + + e.getMessage()); + } + return dataspace_id; + } + + + // Help function for creating a dataset + public static int H5Dcreate_wrap (int loc_id, String name, int type_id, + int space_id, int create_plist_id) + { + int dataset_id = -1; // dataset identifier + + try + { + // Create the dataset + dataset_id = H5.H5Dcreate (loc_id, name, type_id, space_id, + create_plist_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateDataset.H5Dcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateDataset.H5Dcreate_wrap() with other Exception: " + + e.getMessage()); + } + return dataset_id; + } + + + // Help function for ending access to the dataset and releasing + // resources used by it. + public static int H5Dclose_wrap (int dataset_id) + { + int status = -1; + + try + { + // End access to the dataset and release resources used by it. + status = H5.H5Dclose (dataset_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateDataset.H5Dclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateDataset.H5Dclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for terminating access to the data space. + public static int H5Sclose_wrap (int dataspace_id) + { + int status = -1; + + try + { + // Terminate access to the data space. + status = H5.H5Sclose (dataspace_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateDataset.H5Sclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateDataset.H5Sclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for terminating access to the file. + public static int H5Fclose_wrap (int file_id) + { + int status = -1; + + try + { + // Terminate access to the file. 
+ status = H5.H5Fclose (file_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateDataset.H5Fclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateDataset.H5Fclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } +} + diff --git a/doc/html/Tutor/examples/java/CreateFile.java b/doc/html/Tutor/examples/java/CreateFile.java new file mode 100644 index 0000000..550b263 --- /dev/null +++ b/doc/html/Tutor/examples/java/CreateFile.java @@ -0,0 +1,83 @@ +/****************************************************************** + * CreateFile.java (for HDF5 tutorial lesson 4) + * + * -- Creating a HDF5 file + * (a java conversion from h5_crtfile.c) + * + ******************************************************************/ + +import ncsa.hdf.hdf5lib.*; +import ncsa.hdf.hdf5lib.exceptions.*; + +public class CreateFile +{ + public static void main(String []argv) + { + final String FILE = "file.h5"; + int file_id = -1; // file identifier + int status = -1; + + file_id = H5Fcreate_wrap (FILE, HDF5Constants.H5F_ACC_TRUNC, + HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + status = H5Fclose_wrap (file_id); + } + + + // Help function for creating a new file + public static int H5Fcreate_wrap (String name, int flags, + int create_id, int access_id) + { + int file_id = -1; // file identifier + try + { + // Create a new file using default file properties. + file_id = H5.H5Fcreate (name, flags, create_id, access_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateFile.H5Fcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateFile.H5Fcreate_wrap() with other Exception: " + + e.getMessage()); + } + + System.out.println ("\nThe file name is: " + name); + System.out.println ("The file ID is: " + file_id); + + return file_id; + } + + + // Help function for terminating access to the file. + public static int H5Fclose_wrap (int file_id) + { + int status = -1; + + try + { + // Terminate access to the file. + status = H5.H5Fclose (file_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateFile.H5Fclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateFile.H5Fclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } +} + + diff --git a/doc/html/Tutor/examples/java/CreateFileInput.java b/doc/html/Tutor/examples/java/CreateFileInput.java new file mode 100644 index 0000000..0e7fd4d --- /dev/null +++ b/doc/html/Tutor/examples/java/CreateFileInput.java @@ -0,0 +1,118 @@ +/****************************************************************** + * CreateFileInput.java (for HDF5 tutorial Lesson 4) + * + * -- Creating a HDF5 file + * (another java conversion from h5_crtfile.c, give user two options: + * one for library path and one for file name, if user chooses + * nothing, then the default file name is used.) 
+ * + ******************************************************************/ + +import java.lang.System; +import java.util.*; +import ncsa.hdf.hdf5lib.*; +import ncsa.hdf.hdf5lib.exceptions.*; + +public class CreateFileInput +{ + // The run command should be like: + // "./runCreateFileInput -l /usr/lib/hdf5.dll -f ./open.h5" + public static void main(String []argv) + { + int file_id = -1; // file identifier + int status = -1; + String libpath = null; + String filename = null; + + for (int i = 0; i < argv.length; i++) + { + if ("-l".equalsIgnoreCase (argv[i])) + libpath = argv[++i]; + + if ("-f".equalsIgnoreCase (argv[i])) + filename = argv[++i]; + } + + if (libpath != null) + { + Properties pros = System.getProperties (); + pros.put (H5.H5PATH_PROPERTY_KEY, libpath); + + /* + this function call could be used in Java 1.2 + System.setProperty (H5.H5PATH_PROPERTY_KEY, libpath); + */ + } + + if (filename == null) + { + filename = "file.h5"; // if no input file name, use the default name + } + + file_id = H5Fcreate_wrap (filename, + HDF5Constants.H5F_ACC_TRUNC, + HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + status = H5Fclose_wrap (filename, file_id); + } + + + // Help function for creating a new file + public static int H5Fcreate_wrap (String name, int flags, + int create_id, int access_id) + { + int file_id = -1; // file identifier + try + { + // Create a new file using default file properties. + file_id = H5.H5Fcreate (name, flags, create_id, access_id); + + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateFileInput.H5Fcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateFileInput.H5Fcreate_wrap() with other Exception: " + + e.getMessage()); + } + + System.out.println ("\nThe file name is: " + name); + System.out.println ("The file ID is: " + file_id); + + return file_id; + } + + + // Help function for terminating access to the file. + public static int H5Fclose_wrap (String name, int file_id) + { + int status = -1; + + try + { + // Terminate access to the file. + status = H5.H5Fclose (file_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateFileInput.H5Fclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateFileInput.H5Fclose_wrap() with other exception: " + + e.getMessage()); + } + + return status; + } +} + + diff --git a/doc/html/Tutor/examples/java/CreateGroup.java b/doc/html/Tutor/examples/java/CreateGroup.java new file mode 100644 index 0000000..48ef4af --- /dev/null +++ b/doc/html/Tutor/examples/java/CreateGroup.java @@ -0,0 +1,139 @@ +/****************************************************************** + * CreateGroup.java (for HDF5 tutorial lesson 8) + * + * -- Creating and closing a group + * (a java conversion from h5_crtgrp.c) + * + ******************************************************************/ + +import ncsa.hdf.hdf5lib.*; +import ncsa.hdf.hdf5lib.exceptions.*; + +public class CreateGroup +{ + public static void main(String []argv) + { + final String FILE = "group.h5"; + int file_id = -1; // file identifier + int group_id = -1; // group identifier + int status = -1; + + // Create a new file using default properties. + file_id = H5Fcreate_wrap (FILE, HDF5Constants.H5F_ACC_TRUNC, + HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + + // Create a group named "/MyGroup" in the file. + group_id = H5Gcreate_wrap (file_id, "/MyGroup", 0); + + // Close the group. 
+ status = H5Gclose_wrap (group_id); + + // Close the file. + status = H5Fclose_wrap (file_id); + } + + + // Help function for creating a new file + public static int H5Fcreate_wrap (String name, int flags, + int create_id, int access_id) + { + int file_id = -1; // file identifier + try + { + // Create a new file using default file properties. + file_id = H5.H5Fcreate (name, flags, create_id, access_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateGroup.H5Fcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateGroup.H5Fcreate_wrap() with other Exception: " + + e.getMessage()); + } + return file_id; + } + + + // Help function for creating a group named "/MyGroup" in the file. + public static int H5Gcreate_wrap (int loc_id, String name, int size_hint) + { + int group_id = -1; // group identifier + try + { + // Create a group + group_id = H5.H5Gcreate (loc_id, name, size_hint); + + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateGroup.H5Gcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateGroup.H5Gcreate_wrap() with other Exception: " + + e.getMessage()); + } + return group_id; + } + + + // Help function for closing the group + public static int H5Gclose_wrap (int group_id) + { + int status = -1; + + try + { + // Close the group + status = H5.H5Gclose (group_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateGroup.H5Gclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateGroup.H5Gclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for terminating access to the file. + public static int H5Fclose_wrap (int file_id) + { + int status = -1; + + try + { + // Terminate access to the file. + status = H5.H5Fclose (file_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateGroup.H5Fclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateGroup.H5Fclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } +} diff --git a/doc/html/Tutor/examples/java/CreateGroupAR.java b/doc/html/Tutor/examples/java/CreateGroupAR.java new file mode 100644 index 0000000..672f1d1 --- /dev/null +++ b/doc/html/Tutor/examples/java/CreateGroupAR.java @@ -0,0 +1,152 @@ +/****************************************************************** + * CreateGroupAR.java (for HDF5 tutorial lesson 9) + * + * -- Creating groups using absolute and relative names. + * (a java conversion from h5_crtgrpar.c) + * + ******************************************************************/ + +import ncsa.hdf.hdf5lib.*; +import ncsa.hdf.hdf5lib.exceptions.*; + +public class CreateGroupAR +{ + public static void main(String []argv) + { + final String FILE = "groups.h5"; + int file_id = -1; // file identifier + int group1_id = -1; // group identifier + int group2_id = -1; + int group3_id = -1; + + int status = -1; + + // Create a new file using default properties. + file_id = H5Fcreate_wrap (FILE, HDF5Constants.H5F_ACC_TRUNC, + HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + + // Create group "MyGroup" in the root group using absolute name. + group1_id = H5Gcreate_wrap (file_id, "/MyGroup", 0); + + + // Create group "Group_A" in group "MyGroup" using absolute name. 
+ group2_id = H5Gcreate_wrap (file_id, "/MyGroup/Group_A", 0); + + // Create group "Group_B" in group "MyGroup" using relative name. + group3_id = H5Gcreate_wrap (group1_id, "Group_B", 0); + + // Close groups. + status = H5Gclose_wrap (group1_id); + status = H5Gclose_wrap (group2_id); + status = H5Gclose_wrap (group3_id); + + // Close the file. + status = H5Fclose_wrap (file_id); + } + + + // Help function for creating a new file + public static int H5Fcreate_wrap (String name, int flags, + int create_id, int access_id) + { + int file_id = -1; // file identifier + try + { + // Create a new file using default file properties. + file_id = H5.H5Fcreate (name, flags, create_id, access_id); + + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateGroupAR.H5Fcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateGroupAR.H5Fcreate_wrap() with other Exception: " + + e.getMessage()); + } + return file_id; + } + + + // Help function for creating a group named "/MyGroup" in the file. + public static int H5Gcreate_wrap (int loc_id, String name, int size_hint) + { + int group_id = -1; // group identifier + try + { + // Create a group + group_id = H5.H5Gcreate (loc_id, name, size_hint); + + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateGroupAR.H5Gcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateGroupAR.H5Gcreate_wrap() with other Exception: " + + e.getMessage()); + } + return group_id; + } + + + // Help function for closing the group + public static int H5Gclose_wrap (int group_id) + { + int status = -1; + + try + { + // Close the group + status = H5.H5Gclose (group_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateGroupAR.H5Gclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateGroupAR.H5Gclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for terminating access to the file. + public static int H5Fclose_wrap (int file_id) + { + int status = -1; + + try + { + // Terminate access to the file. 
+ status = H5.H5Fclose (file_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateGroupAR.H5Fclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateGroupAR.H5Fclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } +} diff --git a/doc/html/Tutor/examples/java/CreateGroupDataset.java b/doc/html/Tutor/examples/java/CreateGroupDataset.java new file mode 100644 index 0000000..f0fbeaa --- /dev/null +++ b/doc/html/Tutor/examples/java/CreateGroupDataset.java @@ -0,0 +1,340 @@ +/****************************************************************** + * CreateGroupDataset.java (for HDF5 tutorial lesson 10) + * + * -- Creating a dataset in a particular group + * (a java conversion from h5_crtgrpd.c) + * + ******************************************************************/ + +import ncsa.hdf.hdf5lib.*; +import ncsa.hdf.hdf5lib.exceptions.*; + +public class CreateGroupDataset +{ + public static void main(String []argv) + { + final String FILE = "groups.h5"; + int file_id = -1; // file identifier + int group_id = -1; // group identifier + int dataset_id; + int dataspace_id; + int status = -1; + + long[] dims = new long[2]; + int[][] dset1_data = new int[3][3]; + int[][] dset2_data = new int[2][10]; + int i = -1, j = -1; + + // Initialize the first dataset. + for (i = 0; i < 3; i++) + for (j = 0; j < 3; j++) + dset1_data[i][j] = j + 1; + + // Initialize the second dataset. + for (i = 0; i < 2; i++) + for (j = 0; j < 10; j++) + dset2_data[i][j] = j + 1; + + // Open an existing file. + file_id = H5Fopen_wrap (FILE, HDF5Constants.H5F_ACC_RDWR, + HDF5Constants.H5P_DEFAULT); + + // Create the data space for the first dataset. + dims[0] = 3; + dims[1] = 3; + dataspace_id = H5Screate_simple_wrap (2, dims, null); + + // Create a dataset in group "MyGroup". + dataset_id = + H5Dcreate_wrap (file_id, "/MyGroup/dset1", + H5.J2C (HDF5CDataTypes.JH5T_STD_I32BE), + dataspace_id, HDF5Constants.H5P_DEFAULT); + + // Write the first dataset. + status = H5Dwrite_wrap + (dataset_id, + H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT), + HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset1_data); + + // Close the data space for the first dataset. + status = H5Sclose_wrap (dataspace_id); + + // Close the first dataset. + status = H5Dclose_wrap (dataset_id); + + // Open an existing group of the specified file. + group_id = H5Gopen_wrap (file_id, "/MyGroup/Group_A"); + + // Create the data space for the second dataset. + dims[0] = 2; + dims[1] = 10; + dataspace_id = H5Screate_simple_wrap (2, dims, null); + + // Create the second dataset in group "Group_A". + dataset_id = + H5Dcreate_wrap (group_id, "dset2", + H5.J2C (HDF5CDataTypes.JH5T_STD_I32BE), + dataspace_id, HDF5Constants.H5P_DEFAULT); + + // Write the second dataset. + status = H5Dwrite_wrap + (dataset_id, + H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT), + HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, dset2_data); + + // Close the data space for the second dataset. + status = H5Sclose_wrap (dataspace_id); + + // Close the second dataset + status = H5Dclose_wrap (dataset_id); + + // Close the group. + status = H5Gclose_wrap (group_id); + + // Close the file. 
+ status = H5Fclose_wrap (file_id); + } + + + // Help function for opening an existing file + public static int H5Fopen_wrap (String name, int flags, int access_id) + { + int file_id = -1; // file identifier + try + { + // Create a new file using default file properties. + file_id = H5.H5Fopen (name, flags, access_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateGroupDataset.H5Fopen_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateGroupDataset.H5Fopen_wrap() with other Exception: " + + e.getMessage()); + } + return file_id; + } + + + // Help function for creating a new simple dataspace and opening it + // for access + public static int H5Screate_simple_wrap (int rank, long dims[], + long maxdims[]) + { + int dataspace_id = -1; // dataspace identifier + + try + { + // Create the data space for the dataset. + dataspace_id = H5.H5Screate_simple (rank, dims, maxdims); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateGroupDataset.H5Screate_simple_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateGroupDataset.H5Screate_simple_wrap() with other Exception: " + + e.getMessage()); + } + return dataspace_id; + } + + + // Help function for creating a dataset + public static int H5Dcreate_wrap (int loc_id, String name, int type_id, + int space_id, int create_plist_id) + { + int dataset_id = -1; // dataset identifier + + try + { + // Create the dataset + dataset_id = H5.H5Dcreate (loc_id, name, type_id, space_id, + create_plist_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateGroupDataset.H5Dcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateGroupDataset.H5Dcreate_wrap() with other Exception: " + + e.getMessage()); + } + return dataset_id; + } + + + // Help function for writing the dataset + public static int H5Dwrite_wrap (int dataset_id, int mem_type_id, + int mem_space_id, int file_space_id, + int xfer_plist_id, Object buf) + { + int status = -1; + + try + { + // Write the dataset. + status = H5.H5Dwrite (dataset_id, mem_type_id, mem_space_id, + file_space_id, xfer_plist_id, buf); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateGroupDataset.H5Dwrite_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateGroupDataset.H5Dwrite_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for terminating access to the data space. + public static int H5Sclose_wrap (int dataspace_id) + { + int status = -1; + + try + { + // Terminate access to the data space. + status = H5.H5Sclose (dataspace_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("CreateGroupDataset.H5Sclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("CreateGroupDataset.H5Sclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for ending access to the dataset and releasing + // resources used by it. + public static int H5Dclose_wrap (int dataset_id) + { + int status = -1; + + try + { + // End access to the dataset and release resources used by it. 
+         status = H5.H5Dclose (dataset_id);
+      }
+      catch (HDF5Exception hdf5e)
+      {
+         System.out.println
+            ("CreateGroupDataset.H5Dclose_wrap() with HDF5Exception: " +
+             hdf5e.getMessage());
+      }
+      catch (Exception e)
+      {
+         System.out.println
+            ("CreateGroupDataset.H5Dclose_wrap() with other exception: " +
+             e.getMessage());
+      }
+      return status;
+   }
+
+
+   // Help function for opening a group
+   public static int H5Gopen_wrap (int loc_id, String name)
+   {
+      int group_id = -1;    // group identifier
+      try
+      {
+         // Open an existing group
+         group_id = H5.H5Gopen (loc_id, name);
+
+      }
+      catch (HDF5Exception hdf5e)
+      {
+         System.out.println
+            ("CreateGroupDataset.H5Gopen_wrap() with HDF5Exception: " +
+             hdf5e.getMessage());
+      }
+      catch (Exception e)
+      {
+         System.out.println
+            ("CreateGroupDataset.H5Gopen_wrap() with other Exception: " +
+             e.getMessage());
+      }
+      return group_id;
+   }
+
+
+   // Help function for closing the group
+   public static int H5Gclose_wrap (int group_id)
+   {
+      int status = -1;
+
+      try
+      {
+         // Close the group
+         status = H5.H5Gclose (group_id);
+      }
+      catch (HDF5Exception hdf5e)
+      {
+         System.out.println
+            ("CreateGroupDataset.H5Gclose_wrap() with HDF5Exception: " +
+             hdf5e.getMessage());
+      }
+      catch (Exception e)
+      {
+         System.out.println
+            ("CreateGroupDataset.H5Gclose_wrap() with other exception: " +
+             e.getMessage());
+      }
+      return status;
+   }
+
+
+   // Help function for terminating access to the file.
+   public static int H5Fclose_wrap (int file_id)
+   {
+      int status = -1;
+
+      try
+      {
+         // Terminate access to the file.
+         status = H5.H5Fclose (file_id);
+      }
+      catch (HDF5Exception hdf5e)
+      {
+         System.out.println
+            ("CreateGroupDataset.H5Fclose_wrap() with HDF5Exception: " +
+             hdf5e.getMessage());
+      }
+      catch (Exception e)
+      {
+         System.out.println
+            ("CreateGroupDataset.H5Fclose_wrap() with other exception: " +
+             e.getMessage());
+      }
+      return status;
+   }
+}
diff --git a/doc/html/Tutor/examples/java/DatasetRdWt.java b/doc/html/Tutor/examples/java/DatasetRdWt.java
new file mode 100644
index 0000000..4c26d0f
--- /dev/null
+++ b/doc/html/Tutor/examples/java/DatasetRdWt.java
@@ -0,0 +1,213 @@
+/******************************************************************
+ * DatasetRdWt.java (for HDF5 tutorial lesson 6)
+ *
+ *   -- Reading and Writing an existing Dataset
+ *      (a java conversion from h5_rdwt.c)
+ *
+ ******************************************************************/
+
+import ncsa.hdf.hdf5lib.*;
+import ncsa.hdf.hdf5lib.exceptions.*;
+
+public class DatasetRdWt
+{
+   public static void main(String []argv)
+   {
+      final String FILE = "dset.h5";
+      int file_id = -1;       // file identifier
+      int dataset_id = -1;    // dataset identifier
+      int status = -1;
+      int[][] dset_data = new int[4][6];
+
+      // Initialize the dataset.
+      for (int i = 0; i < 4; i++)
+         for (int j = 0; j < 6; j++)
+            dset_data[i][j] = i * 6 + j + 1;
+
+      // Open an existing file
+      file_id = H5Fopen_wrap (FILE, HDF5Constants.H5F_ACC_RDWR,
+                              HDF5Constants.H5P_DEFAULT);
+
+      // Open an existing dataset.
+      dataset_id = H5Dopen_wrap (file_id, "/dset");
+
+      // Write the dataset.
+      status = H5Dwrite_wrap
+         (dataset_id, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+          HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+          HDF5Constants.H5P_DEFAULT, dset_data);
+
+      status = H5Dread_wrap
+         (dataset_id, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+          HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+          HDF5Constants.H5P_DEFAULT, dset_data);
+
+      // Close the dataset.
+      status = H5Dclose_wrap (dataset_id);
+
+      // Close the file.
+ status = H5Fclose_wrap (file_id); + } + + + // Help function for opening an existing file + public static int H5Fopen_wrap (String name, int flags, int access_id) + { + int file_id = -1; // file identifier + try + { + // Create a new file using default file properties. + file_id = H5.H5Fopen (name, flags, access_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("DatasetRdWt.H5Fopen_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("DatasetRdWt.H5Fopen_wrap() with other Exception: " + + e.getMessage()); + } + return file_id; + } + + + // Help function for opening an existing dataset + public static int H5Dopen_wrap (int loc_id, String name) + { + int dataset_id = -1; // dataset identifier + + try + { + // Opening an existing dataset + dataset_id = H5.H5Dopen (loc_id, name); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("DatasetRdWt.H5Dopen_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("DatasetRdWt.H5Dopen_wrap() with other Exception: " + + e.getMessage()); + } + return dataset_id; + } + + + // Help function for writing the dataset + public static int H5Dwrite_wrap (int dataset_id, int mem_type_id, + int mem_space_id, int file_space_id, + int xfer_plist_id, Object buf) + { + int status = -1; + + try + { + // Write the dataset. + status = H5.H5Dwrite (dataset_id, mem_type_id, mem_space_id, + file_space_id, xfer_plist_id, buf); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("DatasetRdWt.H5Dwrite_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("DatasetRdWt.H5Dwrite_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for reading the dataset + public static int H5Dread_wrap (int dataset_id, int mem_type_id, + int mem_space_id, int file_space_id, + int xfer_plist_id, Object obj) + { + int status = -1; + + try + { + // Read the dataset. + status = H5.H5Dread (dataset_id, mem_type_id, mem_space_id, + file_space_id, xfer_plist_id, obj); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("DatasetRdWt.H5Dread_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("DatasetRdWt.H5Dread_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for ending access to the dataset and releasing + // resources used by it. + public static int H5Dclose_wrap (int dataset_id) + { + int status = -1; + + try + { + // End access to the dataset and release resources used by it. + status = H5.H5Dclose (dataset_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("DatasetRdWt.H5Dclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("DatasetRdWt.H5Dclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for terminating access to the file. + public static int H5Fclose_wrap (int file_id) + { + int status = -1; + + try + { + // Terminate access to the file. 
+ status = H5.H5Fclose (file_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("DatasetRdWt.H5Fclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("DatasetRdWt.H5Fclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } +} diff --git a/doc/html/Tutor/examples/java/HyperSlab.java b/doc/html/Tutor/examples/java/HyperSlab.java new file mode 100644 index 0000000..5f8818d --- /dev/null +++ b/doc/html/Tutor/examples/java/HyperSlab.java @@ -0,0 +1,590 @@ +/****************************************************************** + * HyperSlab.java (for HDF5 tutorial lesson 12) + * + * -- Writing and reading a hyperslab + * (a java conversion from h5_hyperslab.c) + * + ******************************************************************/ + +import ncsa.hdf.hdf5lib.*; +import ncsa.hdf.hdf5lib.exceptions.*; + +public class HyperSlab +{ + public static void main (String []argv) + { + final String FILE = "sds.h5"; + final String DATASETNAME = "IntArray"; + final int NX_SUB = 3; /* hyperslab dimensions */ + final int NY_SUB = 4; + final int NX = 7; /* output buffer dimensions */ + final int NY = 7; + final int NZ = 3; + final int RANK = 2; + final int RANK_OUT = 3; + final int X = 5; /* dataset dimensions */ + final int Y = 6; + + long[] dimsf = new long[2]; /* dataset dimensions */ + int[][] data = new int[X][Y]; /* data to write */ + + /* + * Data and output buffer initialization. + */ + int file, dataset; /* handles */ + int dataspace; + int memspace; + long[] dimsm = new long[3]; /* memory space dimensions */ + long[] dims_out = new long[2]; /* dataset dimensions */ + int status; + + int[][][] data_out = new int[NX][NY][NZ]; /* output buffer */ + + long[] count = new long[2]; /* size of the hyperslab in the file */ + long[] offset = new long[2]; /* hyperslab offset in the file */ + long[] count_out = new long[3]; /* size of the hyperslab in memory */ + long[] offset_out = new long[3]; /* hyperslab offset in memory */ + int i, j, k, status_n, rank; + + /********************************************************* + This writes data to the HDF5 file. + *********************************************************/ + + /* + * Data and output buffer initialization. + */ + for (j = 0; j < X; j++) + { + for (i = 0; i < Y; i++) + data[j][i] = i + j; + } + /* + * 0 1 2 3 4 5 + * 1 2 3 4 5 6 + * 2 3 4 5 6 7 + * 3 4 5 6 7 8 + * 4 5 6 7 8 9 + */ + + /* + * Create a new file using H5F_ACC_TRUNC access, + * the default file creation properties, and the default file + * access properties. + */ + file = H5Fcreate_wrap (FILE, HDF5Constants.H5F_ACC_TRUNC, + HDF5Constants.H5P_DEFAULT, + HDF5Constants.H5P_DEFAULT); + + /* + * Describe the size of the array and create the data space for fixed + * size dataset. + */ + dimsf[0] = X; + dimsf[1] = Y; + dataspace = H5Screate_simple_wrap (RANK, dimsf, null); + + /* + * Create a new dataset within the file using defined dataspace and + * default dataset creation properties. + */ + dataset = H5Dcreate_wrap + (file, DATASETNAME, H5.J2C (HDF5CDataTypes.JH5T_STD_I32BE), + dataspace, HDF5Constants.H5P_DEFAULT); + + /* + * Write the data to the dataset using default transfer properties. + */ + status = H5Dwrite_wrap + (dataset, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT), + HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, + HDF5Constants.H5P_DEFAULT, data); + + /* + * Close/release resources. 
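+     * The file is then reopened read-only and a 3 x 4 hyperslab is read
+     * into one plane of a 3-D memory buffer.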
+ */ + H5Sclose_wrap (dataspace); + H5Dclose_wrap (dataset); + H5Fclose_wrap (file); + + /************************************************************* + + This reads the hyperslab from the sds.h5 file just + created, into a 2-dimensional plane of the 3-dimensional + array. + + ************************************************************/ + + for (j = 0; j < NX; j++) + { + for (i = 0; i < NY; i++) + { + for (k = 0; k < NZ ; k++) + data_out[j][i][k] = 0; + } + } + + /* + * Open the file and the dataset. + */ + file = H5Fopen_wrap (FILE, HDF5Constants.H5F_ACC_RDONLY, + HDF5Constants.H5P_DEFAULT); + dataset = H5Dopen_wrap (file, DATASETNAME); + + dataspace = H5Dget_space_wrap (dataset); /* dataspace handle */ + rank = H5Sget_simple_extent_ndims_wrap (dataspace); + status_n = H5Sget_simple_extent_dims_wrap (dataspace, dims_out, null); + + System.out.println ("Rank: " + rank); + System.out.println ("Dimensions: "+ dims_out[0] + " x " + dims_out[1]); + + /* + * Define hyperslab in the dataset. + */ + offset[0] = 1; + offset[1] = 2; + count[0] = NX_SUB; + count[1] = NY_SUB; + status = H5Sselect_hyperslab_wrap (dataspace, + HDF5Constants.H5S_SELECT_SET, + offset, null, count, null); + + /* + * Define the memory dataspace. + */ + dimsm[0] = NX; + dimsm[1] = NY; + dimsm[2] = NZ; + memspace = H5Screate_simple_wrap (RANK_OUT, dimsm, null); + + /* + * Define memory hyperslab. + */ + offset_out[0] = 3; + offset_out[1] = 0; + offset_out[2] = 0; + count_out[0] = NX_SUB; + count_out[1] = NY_SUB; + count_out[2] = 1; + status = H5Sselect_hyperslab_wrap (memspace, + HDF5Constants.H5S_SELECT_SET, + offset_out, null, count_out, null); + + /* + * Read data from hyperslab in the file into the hyperslab in + * memory and display. + */ + status = + H5Dread_wrap (dataset, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT), + memspace, dataspace, HDF5Constants.H5P_DEFAULT, + data_out); + + System.out.println ("Data:"); + for (j = 0; j < NX; j++) + { + for (i = 0; i < NY; i++) + System.out.print (data_out[j][i][0]); + System.out.println (); + } + System.out.println (); + + /* + * 0 0 0 0 0 0 0 + * 0 0 0 0 0 0 0 + * 0 0 0 0 0 0 0 + * 3 4 5 6 0 0 0 + * 4 5 6 7 0 0 0 + * 5 6 7 8 0 0 0 + * 0 0 0 0 0 0 0 + */ + + /* + * Close and release resources. + */ + H5Dclose_wrap (dataset); + H5Sclose_wrap (dataspace); + H5Sclose_wrap (memspace); + H5Fclose_wrap (file); + } + + + // Help function for creating a new file + public static int H5Fcreate_wrap (String name, int flags, + int create_id, int access_id) + { + int file_id = -1; // file identifier + try + { + // Create a new file using default file properties. + file_id = H5.H5Fcreate (name, flags, create_id, access_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("HyperSlab.H5Fcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("HyperSlab.H5Fcreate_wrap() with other Exception: " + + e.getMessage()); + } + return file_id; + } + + + // Help function for opening an existing file + public static int H5Fopen_wrap (String name, int flags, int access_id) + { + int file_id = -1; // file identifier + try + { + // Create a new file using default file properties. 
+ file_id = H5.H5Fopen (name, flags, access_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("HyperSlab.H5Fopen_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("HyperSlab.H5Fopen_wrap() with other Exception: " + + e.getMessage()); + } + return file_id; + } + + + // Help function for opening an existing dataset + public static int H5Dopen_wrap (int loc_id, String name) + { + int dataset_id = -1; // dataset identifier + + try + { + // Opening an existing dataset + dataset_id = H5.H5Dopen (loc_id, name); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("HyperSlab.H5Dopen_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("HyperSlab.H5Dopen_wrap() with other Exception: " + + e.getMessage()); + } + return dataset_id; + } + + + // Help function for creating a new simple dataspace and opening it + // for access + public static int H5Screate_simple_wrap (int rank, long dims[], + long maxdims[]) + { + int dataspace_id = -1; // dataspace identifier + + try + { + // Create the data space for the dataset. + dataspace_id = H5.H5Screate_simple (rank, dims, maxdims); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("HyperSlab.H5Screate_simple_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("HyperSlab.H5Screate_simple_wrap() with other Exception: " + + e.getMessage()); + } + return dataspace_id; + } + + + // Help function for getting an identifier for a copy of + // the dataspace for a dataset + public static int H5Dget_space_wrap (int dataset_id) + { + int dataspace_id = -1; + + try + { + // Returning an identifier for a copy of the dataspace for a dataset + dataspace_id = H5.H5Dget_space (dataset_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("HyperSlab.H5Dget_space_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("HyperSlab.H5Dget_space_wrap() with other Exception: " + + e.getMessage()); + } + return dataspace_id; + } + + + // Help function for determining the dimensionality (or rank) of + // a dataspace + public static int H5Sget_simple_extent_ndims_wrap (int space_id) + { + int rank = -1; + + try + { + // Determine the dimensionality (or rank) of a dataspace. + rank = H5.H5Sget_simple_extent_ndims (space_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("HyperSlab.H5Sget_simple_extent_ndims_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("HyperSlab.H5Sget_simple_extent_ndims_wrap() with other Exception: " + + e.getMessage()); + } + return rank; + } + + + // Help function for returning the size and maximum sizes of each + // dimension of a dataspace through the dims and maxdims parameters. 
+ public static int H5Sget_simple_extent_dims_wrap (int space_id, + long dims[], + long maxdims[]) + { + int dimension_number = -1; + + try + { + dimension_number = H5.H5Sget_simple_extent_dims (space_id, dims, + maxdims); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("HyperSlab.H5Sget_simple_extent_dims_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("HyperSlab.H5Sget_simple_extent_dims_wrap() with other Exception: " + + e.getMessage()); + } + return dimension_number; + } + + + // Help function for selecting a hyperslab region to add to the + // current selected region for the dataspace specified by space_id. + public static int H5Sselect_hyperslab_wrap (int space_id, int op, + long start[], long stride[], + long count[], long block[]) + { + int status = -1; + + try + { + status = H5.H5Sselect_hyperslab (space_id, op, start, stride, + count, block); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("HyperSlab.H5Sselect_hyperslab_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("HyperSlab.H5Sselect_hyperslab_wrap() with other Exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for creating a dataset + public static int H5Dcreate_wrap (int loc_id, String name, int type_id, + int space_id, int create_plist_id) + { + int dataset_id = -1; // dataset identifier + + try + { + // Create the dataset + dataset_id = H5.H5Dcreate (loc_id, name, type_id, space_id, + create_plist_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("HyperSlab.H5Dcreate_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("HyperSlab.H5Dcreate_wrap() with other Exception: " + + e.getMessage()); + } + return dataset_id; + } + + + // Help function for writing the dataset + public static int H5Dwrite_wrap (int dataset_id, int mem_type_id, + int mem_space_id, int file_space_id, + int xfer_plist_id, Object buf) + { + int status = -1; + + try + { + // Write the dataset. + status = H5.H5Dwrite (dataset_id, mem_type_id, mem_space_id, + file_space_id, xfer_plist_id, buf); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("HyperSlab.H5Dwrite_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("HyperSlab.H5Dwrite_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for reading the dataset + public static int H5Dread_wrap (int dataset_id, int mem_type_id, + int mem_space_id, int file_space_id, + int xfer_plist_id, Object obj) + { + int status = -1; + + try + { + // Read the dataset. + status = H5.H5Dread (dataset_id, mem_type_id, mem_space_id, + file_space_id, xfer_plist_id, obj); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("HyperSlab.H5Dread_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("HyperSlab.H5Dread_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for terminating access to the data space. + public static int H5Sclose_wrap (int dataspace_id) + { + int status = -1; + + try + { + // Terminate access to the data space. 
+ status = H5.H5Sclose (dataspace_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("HyperSlab.H5Sclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("HyperSlab.H5Sclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for ending access to the dataset and releasing + // resources used by it. + public static int H5Dclose_wrap (int dataset_id) + { + int status = -1; + + try + { + // End access to the dataset and release resources used by it. + status = H5.H5Dclose (dataset_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("HyperSlab.H5Dclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("HyperSlab.H5Dclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } + + + // Help function for terminating access to the file. + public static int H5Fclose_wrap (int file_id) + { + int status = -1; + + try + { + // Terminate access to the file. + status = H5.H5Fclose (file_id); + } + catch (HDF5Exception hdf5e) + { + System.out.println + ("HyperSlab.H5Fclose_wrap() with HDF5Exception: " + + hdf5e.getMessage()); + } + catch (Exception e) + { + System.out.println + ("HyperSlab.H5Fclose_wrap() with other exception: " + + e.getMessage()); + } + return status; + } +} diff --git a/doc/html/Tutor/examples/java/Makefile b/doc/html/Tutor/examples/java/Makefile new file mode 100644 index 0000000..a70ab0b --- /dev/null +++ b/doc/html/Tutor/examples/java/Makefile @@ -0,0 +1,92 @@ +# Generated automatically from Makefile.in by configure. +# /*======================================================================= +# UNIVERSITY OF ILLINOIS (UI), NATIONAL CENTER FOR SUPERCOMPUTING +# APPLICATIONS (NCSA), Software Distribution Policy for Public Domain +# Software +# +# NCSA HDF Version 5 source code and documentation are in the public +# domain, available without fee for education, research, non-commercial and +# commercial purposes. Users may distribute the binary or source code to +# third parties provided that this statement appears on all copies and that +# no charge is made for such copies. +# +# UI MAKES NO REPRESENTATIONS ABOUT THE SUITABILITY OF THE SOFTWARE FOR ANY +# PURPOSE. IT IS PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY. THE +# UI SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY THE USER OF THIS +# SOFTWARE. The software may have been developed under agreements between +# the UI and the Federal Government which entitle the Government to certain +# rights. +# +# We ask, but do not require that the following message be include in all +# derived works: +# +# Portions developed at the National Center for Supercomputing Applications +# at the University of Illinois at Urbana-Champaign. +# +# By copying this program, you, the user, agree to abide by the conditions +# and understandings with respect to any software which is marked with a +# public domain notice. 
+# +# =======================================================================*/ +# + + +JAVAC = /usr/java1.2/bin/javac +FIND = /bin/find + +CLASSPATH=/usr/java1.2/jre/lib/rt.jar:/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5 + + +.SUFFIXES: .java .class + +.java.class: + $(JAVAC) -classpath $(CLASSPATH) $< + +tutorial: ./Compound.class \ + ./Copy.class \ + ./CreateAttribute.class \ + ./CreateDataset.class \ + ./CreateFile.class \ + ./CreateFileInput.class \ + ./CreateGroup.class \ + ./CreateGroupAR.class \ + ./CreateGroupDataset.class \ + ./DatasetRdWt.class \ + ./HyperSlab.class + chmod u+x *.sh + +clean: clean-classes + +distclean: clean-classes clean-data + rm config.cache config.status config.log + rm -rf ./Makefile + +clean-classes: + $(FIND) . \( -name '#*' -o -name '*~' -o -name '*.class' \) -exec rm -f {} \; ;\ + +clean-data: + rm -rf *.h5 + +Compound: ./Compound.class +Copy: ./Copy.class +CreateAttribute: ./CreateAttribute.class +CreateDataset: ./CreateDataset.class +CreateFile: ./CreateFile.class +CreateFileInput: ./CreateFileInput.class +CreateGroup: ./CreateGroup.class +CreateGroupAR: ./CreateGroupAR.class +CreateGroupDataset: ./CreateGroupDataset.class +DatasetRdWt: ./DatasetRdWt.class +HyperSlab: ./HyperSlab.class + +CLASSES= ./Compound.class \ + ./Copy.class \ + ./CreateAttribute.class \ + ./CreateDataset.class \ + ./CreateFileInput.class \ + ./CreateFile.class \ + ./CreateGroup.class \ + ./CreateGroupAR.class \ + ./CreateGroupDataset.class \ + ./DatasetRdWt.class \ + ./HyperSlab.class diff --git a/doc/html/Tutor/examples/java/Makefile.in b/doc/html/Tutor/examples/java/Makefile.in new file mode 100644 index 0000000..e6bd408 --- /dev/null +++ b/doc/html/Tutor/examples/java/Makefile.in @@ -0,0 +1,91 @@ +# /*======================================================================= +# UNIVERSITY OF ILLINOIS (UI), NATIONAL CENTER FOR SUPERCOMPUTING +# APPLICATIONS (NCSA), Software Distribution Policy for Public Domain +# Software +# +# NCSA HDF Version 5 source code and documentation are in the public +# domain, available without fee for education, research, non-commercial and +# commercial purposes. Users may distribute the binary or source code to +# third parties provided that this statement appears on all copies and that +# no charge is made for such copies. +# +# UI MAKES NO REPRESENTATIONS ABOUT THE SUITABILITY OF THE SOFTWARE FOR ANY +# PURPOSE. IT IS PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY. THE +# UI SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY THE USER OF THIS +# SOFTWARE. The software may have been developed under agreements between +# the UI and the Federal Government which entitle the Government to certain +# rights. +# +# We ask, but do not require that the following message be include in all +# derived works: +# +# Portions developed at the National Center for Supercomputing Applications +# at the University of Illinois at Urbana-Champaign. +# +# By copying this program, you, the user, agree to abide by the conditions +# and understandings with respect to any software which is marked with a +# public domain notice. 
+# +# =======================================================================*/ +# + + +JAVAC = @JAVAC@ +FIND = @FIND@ + +CLASSPATH=@CLASSPATH@ + + +.SUFFIXES: .java .class + +.java.class: + $(JAVAC) -classpath $(CLASSPATH) $< + +tutorial: ./Compound.class \ + ./Copy.class \ + ./CreateAttribute.class \ + ./CreateDataset.class \ + ./CreateFile.class \ + ./CreateFileInput.class \ + ./CreateGroup.class \ + ./CreateGroupAR.class \ + ./CreateGroupDataset.class \ + ./DatasetRdWt.class \ + ./HyperSlab.class + chmod u+x *.sh + +clean: clean-classes + +distclean: clean-classes clean-data + rm config.cache config.status config.log + rm -rf ./Makefile + +clean-classes: + $(FIND) . \( -name '#*' -o -name '*~' -o -name '*.class' \) -exec rm -f {} \; ;\ + +clean-data: + rm -rf *.h5 + +Compound: ./Compound.class +Copy: ./Copy.class +CreateAttribute: ./CreateAttribute.class +CreateDataset: ./CreateDataset.class +CreateFile: ./CreateFile.class +CreateFileInput: ./CreateFileInput.class +CreateGroup: ./CreateGroup.class +CreateGroupAR: ./CreateGroupAR.class +CreateGroupDataset: ./CreateGroupDataset.class +DatasetRdWt: ./DatasetRdWt.class +HyperSlab: ./HyperSlab.class + +CLASSES= ./Compound.class \ + ./Copy.class \ + ./CreateAttribute.class \ + ./CreateDataset.class \ + ./CreateFileInput.class \ + ./CreateFile.class \ + ./CreateGroup.class \ + ./CreateGroupAR.class \ + ./CreateGroupDataset.class \ + ./DatasetRdWt.class \ + ./HyperSlab.class diff --git a/doc/html/Tutor/examples/java/README b/doc/html/Tutor/examples/java/README new file mode 100644 index 0000000..95c9360 --- /dev/null +++ b/doc/html/Tutor/examples/java/README @@ -0,0 +1,21 @@ +These files are Java versions of the example programs used in +the HDF-5 tutoral: + http://hdf.ncsa.uiuc.edu/training/hdf5/ + +The examples here correspond to the examples explained in the first 13 +sections of the tutorial. + +Lesson C program Java program Topic + +4 h5_crtfile.c CreateFile.java Create an HDF-5 file. +5 h5_crtdat.c CreateDataset.java Create a dataset. +6 h5_rdwt.c DatasetRdWt.java Write/Read a dataset. +7 h5_crtatt.c CreateAttribute.java Create an attribute. +8 h5_crtgrp.c CreateGroup.java Create a group. +9 h5_crtgrpar.c CreateGroupAR.java Abs. and Rel. paths. +10 h5_crtgrpd.c CreateGroupDataset.java Create dataset in grp. + +11 h5_compound.c Compound.java Compound datatype +12 h5_hyperslab.c Hyperslab.java Selection of hyperslab +13 h5_copy.c Copy.java Selection of elements + diff --git a/doc/html/Tutor/examples/java/readme.html b/doc/html/Tutor/examples/java/readme.html new file mode 100644 index 0000000..ac96004 --- /dev/null +++ b/doc/html/Tutor/examples/java/readme.html @@ -0,0 +1,192 @@ + + + + + + readme + + + +

    +HDF-5 Tutorial Examples in Java

    + +


    These files are Java versions of the example programs used in the +HDF-5 tutorial: +
          http://hdf.ncsa.uiuc.edu/training/hdf5/ +

    The examples here correspond to the examples explained in the first +13 sections of the tutorial. +
    Lesson      Topic                                  C file           Java file
    Lesson 4    Create an HDF-5 file.                  h5_crtfile.c     CreateFile.java
    Lesson 5    Create a Dataset in an HDF-5 file.     h5_crtdat.c      CreateDataset.java
    Lesson 6    Write and Read data in a dataset.      h5_rdwt.c        DatasetRdWt.java
    Lesson 7    Create an attribute.                   h5_crtatt.c      CreateAttribute.java
    Lesson 8    Create a group.                        h5_crtgrp.c      CreateGroup.java
    Lesson 9    Using absolute and relative paths.     h5_crtgrpar.c    CreateGroupAR.java
    Lesson 10   Create a dataset in a group.           h5_crtgrpd.c     CreateGroupDataset.java
    Lesson 11   Using Compound Datatypes.              h5_compound.c    Compound.java
    Lesson 12   Selection of a hyperslab.              h5_hyperslab.c   HyperSlab.java
    Lesson 13   Selection of elements.                 h5_copy.c        Copy.java

    +


    Some Explanation About Tutorial Examples +

    The Java tutorial programs try to stay close to the corresponding C programs. The structure of each main function is almost the same as in the C program, with one call for each HDF5 library function. For example, where the C program has a call to H5Fopen(), the Java program has a call to H5Fopen_wrap().

    The wrapper functions call the HDF-5 library using the Java HDF-5 Interface +(JHI5). The HDF-5 C interface returns error codes; these are represented +by Java Exceptions in the JHI5. The wrapper function catches the exception +and prints a message. +

    For example, the H5Fopen_wrap() method calls the JHI5, and catches +any exceptions which may occur: +

       public static int H5Fopen_wrap (String name, int flags, int access_id)
    +   {
    +      int file_id = -1;    // file identifier 
    +      try 
    +      {
    +         // Open an existing file.
    +         file_id = H5.H5Fopen (name, flags, access_id);
    +      }
    +      catch (HDF5Exception hdf5e)
    +      {
    +         System.out.println 
    +             ("DatasetRdWt.H5Fopen_wrap() with HDF5Exception: "
    +              + hdf5e.getMessage());
    +      }
    +      catch (Exception e)
    +      {
    +         System.out.println 
    +             ("DatasetRdWt.H5Fopen_wrap() with other Exception: " 
    +              + e.getMessage());
    +      }
    +      return file_id;
    +   }
    + +
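    The wrappers are then used from main() just as the HDF-5 calls are used
    in the C program. For illustration only, the following minimal sketch uses
    the identifiers (FILE, DATASETNAME) and wrapper methods defined in the
    example programs in this directory:
    
+      // Open the file and the dataset, then release both when done.
+      file = H5Fopen_wrap (FILE, HDF5Constants.H5F_ACC_RDONLY,
+                           HDF5Constants.H5P_DEFAULT);
+      dataset = H5Dopen_wrap (file, DATASETNAME);
+      ...
+      H5Dclose_wrap (dataset);
+      H5Fclose_wrap (file);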


    +


    NCSA +
    The +National Center for Supercomputing Applications +
    University +of Illinois at Urbana-Champaign +

    hdfhelp@ncsa.uiuc.edu + + diff --git a/doc/html/Tutor/examples/java/runCompound.sh b/doc/html/Tutor/examples/java/runCompound.sh new file mode 100644 index 0000000..ef2be38 --- /dev/null +++ b/doc/html/Tutor/examples/java/runCompound.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5 +HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib + +#make this relative to the source root... +PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5 +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris" + +export CLASSPATH +export LD_LIBRARY_PATH + +/usr/java1.2/bin/java Compound $* diff --git a/doc/html/Tutor/examples/java/runCompound.sh.in b/doc/html/Tutor/examples/java/runCompound.sh.in new file mode 100644 index 0000000..bc58088 --- /dev/null +++ b/doc/html/Tutor/examples/java/runCompound.sh.in @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=@JH5INST@ +HDF5LIB=@HDF5LIB@ + +#make this relative to the source root... +PWD=@PWD@ +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@" + +export CLASSPATH +export LD_LIBRARY_PATH + +@JAVA@ Compound $* diff --git a/doc/html/Tutor/examples/java/runCopy.sh b/doc/html/Tutor/examples/java/runCopy.sh new file mode 100644 index 0000000..de71783 --- /dev/null +++ b/doc/html/Tutor/examples/java/runCopy.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5 +HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib + +#make this relative to the source root... +PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5 +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris" + +export CLASSPATH +export LD_LIBRARY_PATH + +/usr/java1.2/bin/java Copy $* diff --git a/doc/html/Tutor/examples/java/runCopy.sh.in b/doc/html/Tutor/examples/java/runCopy.sh.in new file mode 100644 index 0000000..2fd8a46 --- /dev/null +++ b/doc/html/Tutor/examples/java/runCopy.sh.in @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=@JH5INST@ +HDF5LIB=@HDF5LIB@ + +#make this relative to the source root... +PWD=@PWD@ +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@" + +export CLASSPATH +export LD_LIBRARY_PATH + +@JAVA@ Copy $* diff --git a/doc/html/Tutor/examples/java/runCreateAttribute.sh b/doc/html/Tutor/examples/java/runCreateAttribute.sh new file mode 100644 index 0000000..419abce --- /dev/null +++ b/doc/html/Tutor/examples/java/runCreateAttribute.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5 +HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib + +#make this relative to the source root... +PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5 +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris" + +export CLASSPATH +export LD_LIBRARY_PATH + +/usr/java1.2/bin/java CreateAttribute $* diff --git a/doc/html/Tutor/examples/java/runCreateAttribute.sh.in b/doc/html/Tutor/examples/java/runCreateAttribute.sh.in new file mode 100644 index 0000000..83bcdc7 --- /dev/null +++ b/doc/html/Tutor/examples/java/runCreateAttribute.sh.in @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=@JH5INST@ +HDF5LIB=@HDF5LIB@ + +#make this relative to the source root... 
+PWD=@PWD@ +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@" + +export CLASSPATH +export LD_LIBRARY_PATH + +@JAVA@ CreateAttribute $* diff --git a/doc/html/Tutor/examples/java/runCreateDataset.sh b/doc/html/Tutor/examples/java/runCreateDataset.sh new file mode 100644 index 0000000..371e811 --- /dev/null +++ b/doc/html/Tutor/examples/java/runCreateDataset.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5 +HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib + +#make this relative to the source root... +PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5 +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris" + +export CLASSPATH +export LD_LIBRARY_PATH + +/usr/java1.2/bin/java CreateDataset $* diff --git a/doc/html/Tutor/examples/java/runCreateDataset.sh.in b/doc/html/Tutor/examples/java/runCreateDataset.sh.in new file mode 100644 index 0000000..606e153 --- /dev/null +++ b/doc/html/Tutor/examples/java/runCreateDataset.sh.in @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=@JH5INST@ +HDF5LIB=@HDF5LIB@ + +#make this relative to the source root... +PWD=@PWD@ +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@" + +export CLASSPATH +export LD_LIBRARY_PATH + +@JAVA@ CreateDataset $* diff --git a/doc/html/Tutor/examples/java/runCreateFile.sh b/doc/html/Tutor/examples/java/runCreateFile.sh new file mode 100644 index 0000000..e32c0ab --- /dev/null +++ b/doc/html/Tutor/examples/java/runCreateFile.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5 +HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib + +#make this relative to the source root... +PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5 +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris" + +export CLASSPATH +export LD_LIBRARY_PATH + +/usr/java1.2/bin/java CreateFile $* diff --git a/doc/html/Tutor/examples/java/runCreateFile.sh.in b/doc/html/Tutor/examples/java/runCreateFile.sh.in new file mode 100644 index 0000000..bf48b9c --- /dev/null +++ b/doc/html/Tutor/examples/java/runCreateFile.sh.in @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=@JH5INST@ +HDF5LIB=@HDF5LIB@ + +#make this relative to the source root... +PWD=@PWD@ +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@" + +export CLASSPATH +export LD_LIBRARY_PATH + +@JAVA@ CreateFile $* diff --git a/doc/html/Tutor/examples/java/runCreateFileInput.sh b/doc/html/Tutor/examples/java/runCreateFileInput.sh new file mode 100644 index 0000000..fa12f06 --- /dev/null +++ b/doc/html/Tutor/examples/java/runCreateFileInput.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5 +HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib + +#make this relative to the source root... 
+PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5 +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris" + +export CLASSPATH +export LD_LIBRARY_PATH + +/usr/java1.2/bin/java CreateFileInput $* diff --git a/doc/html/Tutor/examples/java/runCreateFileInput.sh.in b/doc/html/Tutor/examples/java/runCreateFileInput.sh.in new file mode 100644 index 0000000..776eac5 --- /dev/null +++ b/doc/html/Tutor/examples/java/runCreateFileInput.sh.in @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=@JH5INST@ +HDF5LIB=@HDF5LIB@ + +#make this relative to the source root... +PWD=@PWD@ +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@" + +export CLASSPATH +export LD_LIBRARY_PATH + +@JAVA@ CreateFileInput $* diff --git a/doc/html/Tutor/examples/java/runCreateGroup.sh b/doc/html/Tutor/examples/java/runCreateGroup.sh new file mode 100644 index 0000000..ee9deee --- /dev/null +++ b/doc/html/Tutor/examples/java/runCreateGroup.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5 +HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib + +#make this relative to the source root... +PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5 +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris" + +export CLASSPATH +export LD_LIBRARY_PATH + +/usr/java1.2/bin/java CreateGroup $* diff --git a/doc/html/Tutor/examples/java/runCreateGroup.sh.in b/doc/html/Tutor/examples/java/runCreateGroup.sh.in new file mode 100644 index 0000000..e2eadb5 --- /dev/null +++ b/doc/html/Tutor/examples/java/runCreateGroup.sh.in @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=@JH5INST@ +HDF5LIB=@HDF5LIB@ + +#make this relative to the source root... +PWD=@PWD@ +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@" + +export CLASSPATH +export LD_LIBRARY_PATH + +@JAVA@ CreateGroup $* diff --git a/doc/html/Tutor/examples/java/runCreateGroupAR.sh b/doc/html/Tutor/examples/java/runCreateGroupAR.sh new file mode 100644 index 0000000..2619a11 --- /dev/null +++ b/doc/html/Tutor/examples/java/runCreateGroupAR.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5 +HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib + +#make this relative to the source root... +PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5 +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris" + +export CLASSPATH +export LD_LIBRARY_PATH + +/usr/java1.2/bin/java CreateGroupAR $* diff --git a/doc/html/Tutor/examples/java/runCreateGroupAR.sh.in b/doc/html/Tutor/examples/java/runCreateGroupAR.sh.in new file mode 100644 index 0000000..d61d852 --- /dev/null +++ b/doc/html/Tutor/examples/java/runCreateGroupAR.sh.in @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=@JH5INST@ +HDF5LIB=@HDF5LIB@ + +#make this relative to the source root... 
+PWD=@PWD@ +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@" + +export CLASSPATH +export LD_LIBRARY_PATH + +@JAVA@ CreateGroupAR $* diff --git a/doc/html/Tutor/examples/java/runCreateGroupDataset.sh b/doc/html/Tutor/examples/java/runCreateGroupDataset.sh new file mode 100644 index 0000000..15b7bfa --- /dev/null +++ b/doc/html/Tutor/examples/java/runCreateGroupDataset.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5 +HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib + +#make this relative to the source root... +PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5 +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris" + +export CLASSPATH +export LD_LIBRARY_PATH + +/usr/java1.2/bin/java CreateGroupDataset $* diff --git a/doc/html/Tutor/examples/java/runCreateGroupDataset.sh.in b/doc/html/Tutor/examples/java/runCreateGroupDataset.sh.in new file mode 100644 index 0000000..af2b4b5 --- /dev/null +++ b/doc/html/Tutor/examples/java/runCreateGroupDataset.sh.in @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=@JH5INST@ +HDF5LIB=@HDF5LIB@ + +#make this relative to the source root... +PWD=@PWD@ +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@" + +export CLASSPATH +export LD_LIBRARY_PATH + +@JAVA@ CreateGroupDataset $* diff --git a/doc/html/Tutor/examples/java/runDatasetRdWt.sh b/doc/html/Tutor/examples/java/runDatasetRdWt.sh new file mode 100644 index 0000000..a049ea8 --- /dev/null +++ b/doc/html/Tutor/examples/java/runDatasetRdWt.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5 +HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib + +#make this relative to the source root... +PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5 +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris" + +export CLASSPATH +export LD_LIBRARY_PATH + +/usr/java1.2/bin/java DatasetRdWt $* diff --git a/doc/html/Tutor/examples/java/runDatasetRdWt.sh.in b/doc/html/Tutor/examples/java/runDatasetRdWt.sh.in new file mode 100644 index 0000000..ad3a049 --- /dev/null +++ b/doc/html/Tutor/examples/java/runDatasetRdWt.sh.in @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=@JH5INST@ +HDF5LIB=@HDF5LIB@ + +#make this relative to the source root... +PWD=@PWD@ +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@" + +export CLASSPATH +export LD_LIBRARY_PATH + +@JAVA@ DatasetRdWt $* diff --git a/doc/html/Tutor/examples/java/runHyperSlab.sh b/doc/html/Tutor/examples/java/runHyperSlab.sh new file mode 100644 index 0000000..549f807 --- /dev/null +++ b/doc/html/Tutor/examples/java/runHyperSlab.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5 +HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib + +#make this relative to the source root... 
+PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5 +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris" + +export CLASSPATH +export LD_LIBRARY_PATH + +/usr/java1.2/bin/java HyperSlab $* diff --git a/doc/html/Tutor/examples/java/runHyperSlab.sh.in b/doc/html/Tutor/examples/java/runHyperSlab.sh.in new file mode 100644 index 0000000..f515fc9 --- /dev/null +++ b/doc/html/Tutor/examples/java/runHyperSlab.sh.in @@ -0,0 +1,17 @@ +#!/bin/sh + +JH5INSTALLDIR=@JH5INST@ +HDF5LIB=@HDF5LIB@ + +#make this relative to the source root... +PWD=@PWD@ +LIBDIR=$JH5INSTALLDIR"/lib" + +CLASSPATH=".:"$LIBDIR"/jhdf5.jar" + +LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@" + +export CLASSPATH +export LD_LIBRARY_PATH + +@JAVA@ HyperSlab $* diff --git a/doc/html/Tutor/examples/mount.f90 b/doc/html/Tutor/examples/mount.f90 new file mode 100644 index 0000000..b9c8772 --- /dev/null +++ b/doc/html/Tutor/examples/mount.f90 @@ -0,0 +1,183 @@ +! +!In the following example we create one file with a group in it, +!and another file with a dataset. Mounting is used to +!access the dataset from the second file as a member of a group +!in the first file. +! + + PROGRAM MOUNTEXAMPLE + + USE HDF5 ! This module contains all necessary modules + + IMPLICIT NONE + + ! + !the respective filename is "mount1.h5" and "mount2.h5" + ! + CHARACTER(LEN=9), PARAMETER :: filename1 = "mount1.h5" + CHARACTER(LEN=9), PARAMETER :: filename2 = "mount2.h5" + + ! + !data space rank and dimensions + ! + INTEGER, PARAMETER :: RANK = 2 + INTEGER, PARAMETER :: NX = 4 + INTEGER, PARAMETER :: NY = 5 + + ! + ! File identifiers + ! + INTEGER(HID_T) :: file1_id, file2_id + + ! + ! Group identifier + ! + INTEGER(HID_T) :: gid + + ! + ! dataset identifier + ! + INTEGER(HID_T) :: dset_id + + ! + ! data space identifier + ! + INTEGER(HID_T) :: dataspace + + ! + ! data type identifier + ! + INTEGER(HID_T) :: dtype_id + + ! + !The dimensions for the dataset. + ! + INTEGER(HSIZE_T), DIMENSION(2) :: dims = (/NX,NY/) + + ! + !flag to check operation success + ! + INTEGER :: error + + ! + !general purpose integer + ! + INTEGER :: i, j + + ! + !data buffers + ! + INTEGER, DIMENSION(NX,NY) :: data_in, data_out + + ! + !Initialize FORTRAN predifined datatypes + ! + CALL h5init_types_f(error) + + ! + !Initialize data_in buffer + ! + do i = 1, NX + do j = 1, NY + data_in(i,j) = (i-1) + (j-1) + end do + end do + + ! + !Create first file "mount1.h5" using default properties. + ! + CALL h5fcreate_f(filename1, H5F_ACC_TRUNC_F, file1_id, error) + + ! + !Create group "/G" inside file "mount1.h5". + ! + CALL h5gcreate_f(file1_id, "/G", gid, error) + + ! + !close file and group identifiers. + ! + CALL h5gclose_f(gid, error) + CALL h5fclose_f(file1_id, error) + + ! + !Create second file "mount2.h5" using default properties. + ! + CALL h5fcreate_f(filename2, H5F_ACC_TRUNC_F, file2_id, error) + + ! + !Create data space for the dataset. + ! + CALL h5screate_simple_f(RANK, dims, dataspace, error) + + ! + !Create dataset "/D" inside file "mount2.h5". + ! + CALL h5dcreate_f(file2_id, "/D", H5T_NATIVE_INTEGER, dataspace, & + dset_id, error) + + ! + ! Write data_in to the dataset + ! + CALL h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, data_in, error) + + ! + !close file, dataset and dataspace identifiers. + ! + CALL h5sclose_f(dataspace, error) + CALL h5dclose_f(dset_id, error) + CALL h5fclose_f(file2_id, error) + + ! + !reopen both files. + ! 
+ CALL h5fopen_f (filename1, H5F_ACC_RDONLY_F, file1_id, error) + CALL h5fopen_f (filename2, H5F_ACC_RDONLY_F, file2_id, error) + + ! + !mount the second file under the first file's "/G" group. + ! + CALL h5fmount_f (file1_id, "/G", file2_id, error) + + + ! + !Access dataset D in the first file under /G/D name. + ! + CALL h5dopen_f(file1_id, "/G/D", dset_id, error) + + ! + !Get dataset's data type. + ! + CALL h5dget_type_f(dset_id, dtype_id, error) + + ! + !Read the dataset. + ! + CALL h5dread_f(dset_id, dtype_id, data_out, error) + + ! + !Print out the data. + ! + do i = 1, NX + print *, (data_out(i,j), j = 1, NY) + end do + + + ! + !Close dset_id and dtype_id. + ! + CALL h5dclose_f(dset_id, error) + CALL h5tclose_f(dtype_id, error) + + ! + !unmount the second file. + ! + CALL h5funmount_f(file1_id, "/G", error); + + ! + !Close both files. + ! + CALL h5fclose_f(file1_id, error) + CALL h5fclose_f(file2_id, error) + + END PROGRAM MOUNTEXAMPLE + diff --git a/doc/html/Tutor/examples/refobjexample.f90 b/doc/html/Tutor/examples/refobjexample.f90 new file mode 100644 index 0000000..a1c0a44 --- /dev/null +++ b/doc/html/Tutor/examples/refobjexample.f90 @@ -0,0 +1,136 @@ +! +! This program shows how to create and store references to the objects. +! Program creates a file, two groups, a dataset to store integer data and +! a dataset to store references to the objects. +! Stored references are used to open the objects they are point to. +! Data is written to the dereferenced dataset, and class type is displayed for +! the shared datatype. +! + PROGRAM OBJ_REFERENCES + + USE HDF5 ! This module contains all necessary modules + + IMPLICIT NONE + CHARACTER(LEN=10), PARAMETER :: filename = "FORTRAN.h5" ! File + CHARACTER(LEN=8), PARAMETER :: dsetnamei = "INTEGERS" ! Dataset with the integer data + CHARACTER(LEN=17), PARAMETER :: dsetnamer = "OBJECT_REFERENCES" ! Dataset wtih object + ! references + CHARACTER(LEN=6), PARAMETER :: groupname1 = "GROUP1" ! Groups in the file + CHARACTER(LEN=6), PARAMETER :: groupname2 = "GROUP2" ! + + INTEGER(HID_T) :: file_id ! File identifier + INTEGER(HID_T) :: grp1_id ! Group identifiers + INTEGER(HID_T) :: grp2_id ! + INTEGER(HID_T) :: dset_id ! Dataset identifiers + INTEGER(HID_T) :: dsetr_id ! + INTEGER(HID_T) :: type_id ! Type identifier + INTEGER(HID_T) :: space_id ! Dataspace identifiers + INTEGER(HID_T) :: spacer_id ! + INTEGER :: error + INTEGER(HSIZE_T), DIMENSION(1) :: dims = (/5/) + INTEGER(HSIZE_T), DIMENSION(1) :: dimsr= (/4/) + INTEGER(HSIZE_T), DIMENSION(1) :: my_maxdims = (/5/) + INTEGER :: rank = 1 + INTEGER :: rankr = 1 + TYPE(hobj_ref_t_f), DIMENSION(4) :: ref + TYPE(hobj_ref_t_f), DIMENSION(4) :: ref_out + INTEGER, DIMENSION(5) :: data = (/1, 2, 3, 4, 5/) + INTEGER :: class + ! + ! Initialize FORTRAN predefined datatypes + ! + CALL h5init_types_f(error) + ! + ! Create a file + ! + CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error) + ! Default file access and file creation + ! properties are used. + ! + ! Create a group in the file + ! + CALL h5gcreate_f(file_id, groupname1, grp1_id, error) + ! + ! Create a group inside the created gorup + ! + CALL h5gcreate_f(grp1_id, groupname2, grp2_id, error) + ! + ! Create dataspaces for datasets + ! + CALL h5screate_simple_f(rank, dims, space_id, error, maxdims=my_maxdims) + CALL h5screate_simple_f(rankr, dimsr, spacer_id, error) + ! + ! Create integer dataset + ! + CALL h5dcreate_f(file_id, dsetnamei, H5T_NATIVE_INTEGER, space_id, & + dset_id, error) + ! + ! 
Create dataset to store references to the objects + ! + CALL h5dcreate_f(file_id, dsetnamer, H5T_STD_REF_OBJ, spacer_id, & + dsetr_id, error) + ! + ! Create a datatype and store in the file + ! + CALL h5tcopy_f(H5T_NATIVE_REAL, type_id, error) + CALL h5tcommit_f(file_id, "MyType", type_id, error) + ! + ! Close dataspaces, groups and integer dataset + ! + CALL h5sclose_f(space_id, error) + CALL h5sclose_f(spacer_id, error) + CALL h5tclose_f(type_id, error) + CALL h5dclose_f(dset_id, error) + CALL h5gclose_f(grp1_id, error) + CALL h5gclose_f(grp2_id, error) + ! + ! Craete references to two groups, integer dataset and shared datatype + ! and write it to the dataset in the file + ! + CALL h5rcreate_f(file_id, groupname1, ref(1), error) + CALL h5rcreate_f(file_id, "/GROUP1/GROUP2", ref(2), error) + CALL h5rcreate_f(file_id, dsetnamei, ref(3), error) + CALL h5rcreate_f(file_id, "MyType", ref(4), error) + CALL h5dwrite_f(dsetr_id, H5T_STD_REF_OBJ, ref, error) + ! + ! Close the dataset + ! + CALL h5dclose_f(dsetr_id, error) + ! + ! Reopen the dataset with object references and read references to the buffer + ! + CALL h5dopen_f(file_id, dsetnamer,dsetr_id,error) + CALL h5dread_f(dsetr_id, H5T_STD_REF_OBJ, ref_out, error) + ! + ! Dereference the third reference. We know that it is a dataset. On practice + ! one should use h5rget_object_type_f function to find out + ! the type of an object the reference points to. + ! + CALL h5rdereference_f(dsetr_id, ref(3), dset_id, error) + ! + ! Write data to the dataset. + ! + CALL h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, data, error) + if (error .eq. 0) write(*,*) "Data has been successfully written to the dataset " + ! + ! Dereference the fourth reference. We know that it is a datatype. On practice + ! one should use h5rget_object_type_f function to find out + ! the type of an object the reference points to. + ! + CALL h5rdereference_f(dsetr_id, ref(4), type_id, error) + ! + ! Get datatype class and display it if it is of a FLOAT class. + ! + CALL h5tget_class_f(type_id, class, error) + if(class .eq. H5T_FLOAT_F) write(*,*) "Stored datatype is of a FLOAT class" + ! + ! Close all objects. + ! + CALL h5dclose_f(dset_id, error) + CALL h5tclose_f(type_id, error) + CALL h5dclose_f(dsetr_id, error) + CALL h5fclose_f(file_id, error) + + END PROGRAM OBJ_REFERENCES + + diff --git a/doc/html/Tutor/examples/refregexample.f90 b/doc/html/Tutor/examples/refregexample.f90 new file mode 100644 index 0000000..3a5bea4 --- /dev/null +++ b/doc/html/Tutor/examples/refregexample.f90 @@ -0,0 +1,159 @@ +! +! This program shows how to create, store and dereference references +! to the dataset regions. +! Program creates a file and writes two dimensional integer dataset +! to it. Then program creates and stores references to the hyperslab +! and 3 points selected in the integer dataset, in the second dataset. +! Program reopens the second dataset, reads and dereferences region +! references, and then reads and displays selected data from the +! integer dataset. +! + PROGRAM REG_REFERENCE + + USE HDF5 ! This module contains all necessary modules + + IMPLICIT NONE + CHARACTER(LEN=10), PARAMETER :: filename = "FORTRAN.h5" + CHARACTER(LEN=6), PARAMETER :: dsetnamev = "MATRIX" + CHARACTER(LEN=17), PARAMETER :: dsetnamer = "REGION_REFERENCES" + + INTEGER(HID_T) :: file_id ! File identifier + INTEGER(HID_T) :: space_id ! Dataspace identifier + INTEGER(HID_T) :: spacer_id ! Dataspace identifier + INTEGER(HID_T) :: dsetv_id ! Dataset identifier + INTEGER(HID_T) :: dsetr_id ! 
Dataset identifier + INTEGER :: error + TYPE(hdset_reg_ref_t_f) , DIMENSION(2) :: ref ! Buffers to store references + TYPE(hdset_reg_ref_t_f) , DIMENSION(2) :: ref_out ! + INTEGER(HSIZE_T), DIMENSION(2) :: dims = (/2,9/) ! Datasets dimensions + INTEGER(HSIZE_T), DIMENSION(1) :: dimsr = (/2/) ! + INTEGER(HSSIZE_T), DIMENSION(2) :: start + INTEGER(HSIZE_T), DIMENSION(2) :: count + INTEGER :: rankr = 1 + INTEGER :: rank = 2 + INTEGER , DIMENSION(2,9) :: data + INTEGER , DIMENSION(2,9) :: data_out = 0 + INTEGER(HSSIZE_T) , DIMENSION(2,3) :: coord + INTEGER ::num_points = 3 ! Number of selected points + INTEGER :: i, j + coord = reshape((/1,1,2,7,1,9/), (/2,3/)) ! Coordinates of selected points + data = reshape ((/1,1,1,2,2,2,3,3,3,4,4,4,5,5,5,6,6,6/), (/2,9/)) + ! + ! Initialize FORTRAN predefined datatypes. + ! + CALL h5init_types_f(error) + ! + ! Create a new file. + ! + CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error) + ! Default file access and file creation + ! properties are used. + ! + ! Create dataspaces: + ! + ! for dataset with references to dataset regions + ! + CALL h5screate_simple_f(rankr, dimsr, spacer_id, error) + ! + ! for integer dataset + ! + CALL h5screate_simple_f(rank, dims, space_id, error) + ! + ! Create and write datasets: + ! + ! Integer dataset + ! + CALL h5dcreate_f(file_id, dsetnamev, H5T_NATIVE_INTEGER, space_id, & + dsetv_id, error) + CALL h5dwrite_f(dsetv_id, H5T_NATIVE_INTEGER, data, error) + CALL h5dclose_f(dsetv_id, error) + ! + ! Dataset with references + ! + CALL h5dcreate_f(file_id, dsetnamer, H5T_STD_REF_DSETREG, spacer_id, & + dsetr_id, error) + ! + ! Create a reference to the hyperslab selection. + ! + start(1) = 0 + start(2) = 3 + count(1) = 2 + count(2) = 3 + CALL h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, & + start, count, error) + CALL h5rcreate_f(file_id, dsetnamev, space_id, ref(1), error) + ! + ! Create a reference to elements selection. + ! + CALL h5sselect_none_f(space_id, error) + CALL h5sselect_elements_f(space_id, H5S_SELECT_SET_F, rank, num_points,& + coord, error) + CALL h5rcreate_f(file_id, dsetnamev, space_id, ref(2), error) + ! + ! Write dataset with the references. + ! + CALL h5dwrite_f(dsetr_id, H5T_STD_REF_DSETREG, ref, error) + ! + ! Close all objects. + ! + CALL h5sclose_f(space_id, error) + CALL h5sclose_f(spacer_id, error) + CALL h5dclose_f(dsetr_id, error) + CALL h5fclose_f(file_id, error) + ! + ! Reopen the file to test selections. + ! + CALL h5fopen_f (filename, H5F_ACC_RDWR_F, file_id, error) + CALL h5dopen_f(file_id, dsetnamer, dsetr_id, error) + ! + ! Read references to the dataset regions. + ! + CALL h5dread_f(dsetr_id, H5T_STD_REF_DSETREG, ref_out, error) + ! + ! Dereference the first reference. + ! + CALL H5rdereference_f(dsetr_id, ref_out(1), dsetv_id, error) + CALL H5rget_region_f(dsetr_id, ref_out(1), space_id, error) + ! + ! Read selected data from the dataset. + ! + CALL h5dread_f(dsetv_id, H5T_NATIVE_INTEGER, data_out, error, & + mem_space_id = space_id, file_space_id = space_id) + write(*,*) "Hyperslab selection" + write(*,*) + do i = 1,2 + write(*,*) (data_out (i,j), j = 1,9) + enddo + write(*,*) + CALL h5sclose_f(space_id, error) + CALL h5dclose_f(dsetv_id, error) + data_out = 0 + ! + ! Dereference the second reference. + ! + CALL H5rdereference_f(dsetr_id, ref_out(2), dsetv_id, error) + CALL H5rget_region_f(dsetr_id, ref_out(2), space_id, error) + ! + ! Read selected data from the dataset. + ! 
+ CALL h5dread_f(dsetv_id, H5T_NATIVE_INTEGER, data_out, error, & + mem_space_id = space_id, file_space_id = space_id) + write(*,*) "Point selection" + write(*,*) + do i = 1,2 + write(*,*) (data_out (i,j), j = 1,9) + enddo + ! + ! Close all objects + ! + CALL h5sclose_f(space_id, error) + CALL h5dclose_f(dsetv_id, error) + CALL h5dclose_f(dsetr_id, error) + ! + ! Close FORTRAN predefined datatypes. + ! + CALL h5close_types_f(error) + + END PROGRAM REG_REFERENCE + + diff --git a/doc/html/Tutor/examples/rwdsetexample.f90 b/doc/html/Tutor/examples/rwdsetexample.f90 new file mode 100644 index 0000000..085b4e8 --- /dev/null +++ b/doc/html/Tutor/examples/rwdsetexample.f90 @@ -0,0 +1,78 @@ +! +! The following example shows how to write and read to/from an existing dataset. +! It opens the file created in the previous example, obtains the dataset +! identifier, writes the data to the dataset in the file, +! then reads the dataset to memory. +! + + + PROGRAM RWDSETEXAMPLE + + USE HDF5 ! This module contains all necessary modules + + IMPLICIT NONE + + CHARACTER(LEN=8), PARAMETER :: filename = "dsetf.h5" ! File name + CHARACTER(LEN=4), PARAMETER :: dsetname = "dset" ! Dataset name + + INTEGER(HID_T) :: file_id ! File identifier + INTEGER(HID_T) :: dset_id ! Dataset identifier + + INTEGER :: error ! Error flag + INTEGER :: i, j + + INTEGER, DIMENSION(4,6) :: dset_data, data_out ! Data buffers + + ! + ! Initialize the dset_data array. + ! + do i = 1, 4 + do j = 1, 6 + dset_data(i,j) = (i-1)*6 + j; + end do + end do + + ! + ! Initialize FORTRAN predefined datatypes + ! + CALL h5init_types_f(error) + + ! + ! Open an existing file. + ! + CALL h5fopen_f (filename, H5F_ACC_RDWR_F, file_id, error) + + ! + ! Open an existing dataset. + ! + CALL h5dopen_f(file_id, dsetname, dset_id, error) + + ! + ! Write the dataset. + ! + CALL h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, dset_data, error) + + ! + ! Read the dataset. + ! + CALL h5dread_f(dset_id, H5T_NATIVE_INTEGER, data_out, error) + + ! + ! Close the dataset. + ! + CALL h5dclose_f(dset_id, error) + + ! + ! Close the file. + ! + CALL h5fclose_f(file_id, error) + + ! + ! Close FORTRAN predefined datatypes. + ! + CALL h5close_types_f(error) + + END PROGRAM RWDSETEXAMPLE + + + diff --git a/doc/html/Tutor/examples/selectele.f90 b/doc/html/Tutor/examples/selectele.f90 new file mode 100644 index 0000000..04a6478 --- /dev/null +++ b/doc/html/Tutor/examples/selectele.f90 @@ -0,0 +1,282 @@ +! +! This program creates two files, copy1.h5, and copy2.h5. +! In copy1.h5, it creates a 3x4 dataset called 'Copy1', +! and write 0's to this dataset. +! In copy2.h5, it create a 3x4 dataset called 'Copy2', +! and write 1's to this dataset. +! It closes both files, reopens both files, selects two +! points in copy1.h5 and writes values to them. Then it +! uses an H5Scopy to write the same selection to copy2.h5. +! Program reopens the files, and reads and prints the contents of +! the two datasets. +! + + PROGRAM SELECTEXAMPLE + + USE HDF5 ! This module contains all necessary modules + + IMPLICIT NONE + + CHARACTER(LEN=8), PARAMETER :: filename1 = "copy1.h5" ! File name + CHARACTER(LEN=8), PARAMETER :: filename2 = "copy2.h5" ! + CHARACTER(LEN=5), PARAMETER :: dsetname1 = "Copy1" ! Dataset name + CHARACTER(LEN=5), PARAMETER :: dsetname2 = "Copy2" ! + + INTEGER, PARAMETER :: RANK = 2 ! Dataset rank + + INTEGER, PARAMETER :: NUMP = 2 ! Number of points selected + + INTEGER(HID_T) :: file1_id ! File1 identifier + INTEGER(HID_T) :: file2_id ! File2 identifier + INTEGER(HID_T) :: dset1_id ! 
Dataset1 identifier + INTEGER(HID_T) :: dset2_id ! Dataset2 identifier + INTEGER(HID_T) :: dataspace1 ! Dataspace identifier + INTEGER(HID_T) :: dataspace2 ! Dataspace identifier + INTEGER(HID_T) :: memspace ! memspace identifier + + INTEGER(HSIZE_T), DIMENSION(1) :: dimsm = (/2/) + ! Memory dataspace dimensions + INTEGER(HSIZE_T), DIMENSION(2) :: dimsf = (/3,4/) + ! File dataspace dimensions + INTEGER(HSSIZE_T), DIMENSION(RANK,NUMP) :: coord ! Elements coordinates + ! in the file + + INTEGER, DIMENSION(3,4) :: buf1, buf2, bufnew ! Data buffers + INTEGER, DIMENSION(2) :: val = (/53, 59/) ! Values to write + + INTEGER :: memrank = 1 ! Rank of the dataset in memory + + INTEGER :: i, j + + INTEGER :: error ! Error flag + LOGICAL :: status + + + ! + ! Create two files containing identical datasets. Write 0's to one + ! and 1's to the other. + ! + + ! + ! Data initialization. + ! + do i = 1, 3 + do j = 1, 4 + buf1(i,j) = 0; + end do + end do + + do i = 1, 3 + do j = 1, 4 + buf2(i,j) = 1; + end do + end do + + ! + ! Initialize FORTRAN predefined datatypes + ! + CALL h5init_types_f(error) + + ! + ! Create file1, file2 using default properties. + ! + CALL h5fcreate_f(filename1, H5F_ACC_TRUNC_F, file1_id, error) + + CALL h5fcreate_f(filename2, H5F_ACC_TRUNC_F, file2_id, error) + + ! + ! Create the data space for the datasets. + ! + CALL h5screate_simple_f(RANK, dimsf, dataspace1, error) + + CALL h5screate_simple_f(RANK, dimsf, dataspace2, error) + + ! + ! Create the datasets with default properties. + ! + CALL h5dcreate_f(file1_id, dsetname1, H5T_NATIVE_INTEGER, dataspace1, & + dset1_id, error) + + CALL h5dcreate_f(file2_id, dsetname2, H5T_NATIVE_INTEGER, dataspace2, & + dset2_id, error) + + ! + ! Write the datasets. + ! + CALL h5dwrite_f(dset1_id, H5T_NATIVE_INTEGER, buf1, error) + + CALL h5dwrite_f(dset2_id, H5T_NATIVE_INTEGER, buf2, error) + + ! + ! Close the dataspace for the datasets. + ! + CALL h5sclose_f(dataspace1, error) + + CALL h5sclose_f(dataspace2, error) + + ! + ! Close the datasets. + ! + CALL h5dclose_f(dset1_id, error) + + CALL h5dclose_f(dset2_id, error) + + ! + ! Close the files. + ! + CALL h5fclose_f(file1_id, error) + + CALL h5fclose_f(file2_id, error) + + ! + ! Open the two files. Select two points in one file, write values to + ! those point locations, then do H5Scopy and write the values to the + ! other file. Close files. + ! + + ! + ! Open the files. + ! + CALL h5fopen_f (filename1, H5F_ACC_RDWR_F, file1_id, error) + + CALL h5fopen_f (filename2, H5F_ACC_RDWR_F, file2_id, error) + + ! + ! Open the datasets. + ! + CALL h5dopen_f(file1_id, dsetname1, dset1_id, error) + + CALL h5dopen_f(file2_id, dsetname2, dset2_id, error) + + ! + ! Get dataset1's dataspace identifier. + ! + CALL h5dget_space_f(dset1_id, dataspace1, error) + + ! + ! Create memory dataspace. + ! + CALL h5screate_simple_f(memrank, dimsm, memspace, error) + + ! + ! Set the selected point positions. Because Fortran array index starts + ! from 1, so add one to the actual select points in C. + ! + coord(1,1) = 1 + coord(2,1) = 2 + coord(1,2) = 1 + coord(2,2) = 4 + + ! + ! Select the elements in file space. + ! + CALL h5sselect_elements_f(dataspace1, H5S_SELECT_SET_F, RANK, NUMP,& + coord, error) + + ! + ! Write value into the selected points in dataset1. + ! + CALL H5dwrite_f(dset1_id, H5T_NATIVE_INTEGER, val, error, & + mem_space_id=memspace, file_space_id=dataspace1) + + ! + ! Copy the daspace1 into dataspace2. + ! + CALL h5scopy_f(dataspace1, dataspace2, error) + + ! + ! 
Write value into the selected points in dataset2. + ! + CALL H5dwrite_f(dset2_id, H5T_NATIVE_INTEGER, val, error, & + mem_space_id=memspace, file_space_id=dataspace2) + + ! + ! Close the dataspace for the datasets. + ! + CALL h5sclose_f(dataspace1, error) + + CALL h5sclose_f(dataspace2, error) + + ! + ! Close the memoryspace. + ! + CALL h5sclose_f(memspace, error) + + ! + ! Close the datasets. + ! + CALL h5dclose_f(dset1_id, error) + + CALL h5dclose_f(dset2_id, error) + + ! + ! Close the files. + ! + CALL h5fclose_f(file1_id, error) + + CALL h5fclose_f(file2_id, error) + + ! + ! Open both files and print the contents of the datasets. + ! + + ! + ! Open the files. + ! + CALL h5fopen_f (filename1, H5F_ACC_RDWR_F, file1_id, error) + + CALL h5fopen_f (filename2, H5F_ACC_RDWR_F, file2_id, error) + + ! + ! Open the datasets. + ! + CALL h5dopen_f(file1_id, dsetname1, dset1_id, error) + + CALL h5dopen_f(file2_id, dsetname2, dset2_id, error) + + ! + ! Read dataset from the first file. + ! + CALL h5dread_f(dset1_id, H5T_NATIVE_INTEGER, bufnew, error) + + ! + ! Display the data read from dataset "Copy1" + ! + write(*,*) "The data in dataset Copy1 is: " + do i = 1, 3 + print *, (bufnew(i,j), j = 1,4) + end do + + ! + ! Read dataset from the second file. + ! + CALL h5dread_f(dset2_id, H5T_NATIVE_INTEGER, bufnew, error) + + ! + ! Display the data read from dataset "Copy2" + ! + write(*,*) "The data in dataset Copy2 is: " + do i = 1, 3 + print *, (bufnew(i,j), j = 1,4) + end do + + ! + ! Close datasets. + ! + CALL h5dclose_f(dset1_id, error) + + CALL h5dclose_f(dset2_id, error) + + ! + ! Close files. + ! + CALL h5fclose_f(file1_id, error) + + CALL h5fclose_f(file2_id, error) + + ! + ! Close FORTRAN predefined datatypes. + ! + CALL h5close_types_f(error) + + END PROGRAM SELECTEXAMPLE diff --git a/doc/html/Tutor/extend.html b/doc/html/Tutor/extend.html index 03b5972..55ff9e9 100644 --- a/doc/html/Tutor/extend.html +++ b/doc/html/Tutor/extend.html @@ -35,16 +35,19 @@ width=78 height=27 alt="NCSA">


    Creating an Extendible Dataset

    -An extendible dataset is one whose dimensions can grow. In HDF5, it is possible to define a dataset to have certain initial dimensions, then later -to increase the size of any of the initial dimensions. +An extendible dataset is one whose dimensions can grow. +HDF5 allows you to define a dataset to have certain initial dimensions, +then to later increase the size of any of the initial dimensions.

    -HDF5 requires you to use chunking in order to define extendible datasets. Chunking makes it possible to extend datasets efficiently, without -having to reorganize storage excessively. +HDF5 requires you to use chunking to define extendible datasets. +This makes it possible to extend datasets efficiently without +having to excessively reorganize storage.

    The following operations are required in order to write an extendible dataset:

    1. Declare the dataspace of the dataset to have unlimited dimensions for all dimensions that might eventually be extended. -
    2. Set dataset creation properties to enable chunking and create a dataset. +
    2. Set dataset creation properties to enable chunking. +
    3. Create the dataset.
    4. Extend the size of the dataset.
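For illustration, these steps might look as follows in C. This is a minimal sketch only, modeled on the calls discussed in the Remarks below and on the forms of H5Dcreate and H5Dextend used elsewhere in this tutorial; the file name ext.h5, the dataset name ExtendibleArray, and the chunk dimensions are simply placeholders, and error checking is omitted.

    #include "hdf5.h"

    int main (void)
    {
        hsize_t dims[2]    = {3, 3};                          /* initial size      */
        hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};  /* unlimited maximum */
        hsize_t chunk[2]   = {2, 5};                          /* chunk dimensions  */
        hsize_t newsize[2] = {10, 3};                         /* size after extend */

        /* 1. Dataspace with unlimited maximum dimensions */
        hid_t dataspace = H5Screate_simple (2, dims, maxdims);

        /* 2. Dataset creation property list with chunking enabled */
        hid_t cparms = H5Pcreate (H5P_DATASET_CREATE);
        H5Pset_chunk (cparms, 2, chunk);

        /* 3. Create the dataset using the creation property list */
        hid_t file    = H5Fcreate ("ext.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        hid_t dataset = H5Dcreate (file, "ExtendibleArray", H5T_NATIVE_INT,
                                   dataspace, cparms);

        /* 4. Extend the dataset to 10 x 3 */
        H5Dextend (dataset, newsize);

        /* Release resources */
        H5Pclose (cparms);
        H5Dclose (dataset);
        H5Sclose (dataspace);
        H5Fclose (file);
        return 0;
    }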

    Programming Example

    @@ -52,229 +55,197 @@ The following operations are required in order to write an extendible dataset:

    Description

    This example shows how to create a 3 x 3 extendible dataset, write to that dataset, extend the dataset to 10 x 3, and write to the dataset again. -[
    Download h5_extend.c] -
    -
    -/**************************************************************  
    - *
    - *   This example shows how to work with extendible datasets.
    - *   In the current version of the library a dataset MUST be
    - *   chunked in order to be extendible.  
    - *
    - *   This example is derived from the h5_extend_write.c and 
    - *   h5_read_chunk.c examples that are in the "Introduction 
    - *   to HDF5".
    - *   
    - *************************************************************/
    - 
    -#include "hdf5.h"
    -
    -#define FILE        "ext.h5"
    -#define DATASETNAME "ExtendibleArray" 
    -#define RANK         2
    -
    -int
    -main (void)
    -{
    -    hid_t       file;                          /* handles */
    -    hid_t       dataspace, dataset;  
    -    hid_t       filespace;                   
    -    hid_t       cparms;                     
    -    hid_t       memspace;
    -
    -    hsize_t      dims[2]  = { 3, 3};           /* dataset dimensions			
    -                                                  at creation time */
    -    hsize_t      dims1[2] = { 3, 3};           /* data1 dimensions */ 
    -    hsize_t      dims2[2] = { 7, 1};           /* data2 dimensions */  
    -
    -    hsize_t      maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
    -    hsize_t      size[2];
    -    hssize_t     offset[2];
    -    hsize_t      i,j;
    -    herr_t       status, status_n;                             
    -    int          data1[3][3] = { {1, 1, 1},      /* data to write */
    -                                 {1, 1, 1},
    -                                 {1, 1, 1} };      
    -
    -    int          data2[7]    = { 2, 2, 2, 2, 2, 2, 2};
    -
    -    /* Variables used in reading data back */
    -    hsize_t      chunk_dims[2] ={2, 5};
    -    hsize_t      chunk_dimsr[2];
    -    hsize_t      dimsr[2];
    -    int          data_out[10][3];
    -    int          rank, rank_chunk;
    -
    -    /* Create the data space with unlimited dimensions. */
    -    dataspace = H5Screate_simple (RANK, dims, maxdims); 
    -
    -    /* Create a new file. If file exists its contents will be overwritten. */
    -    file = H5Fcreate (FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    -
    -    /* Modify dataset creation properties, i.e. enable chunking  */
    -    cparms = H5Pcreate (H5P_DATASET_CREATE);
    -    status = H5Pset_chunk ( cparms, RANK, chunk_dims);
    -
    -    /* Create a new dataset within the file using cparms
    -       creation properties.  */
    -    dataset = H5Dcreate (file, DATASETNAME, H5T_NATIVE_INT, dataspace,
    -                         cparms);
    -
    -    /* Extend the dataset. This call assures that dataset is 3 x 3.*/
    -    size[0]   = 3; 
    -    size[1]   = 3; 
    -    status = H5Dextend (dataset, size);
    -
    -    /* Select a hyperslab  */
    -    filespace = H5Dget_space (dataset);
    -    offset[0] = 0;
    -    offset[1] = 0;
    -    status = H5Sselect_hyperslab (filespace, H5S_SELECT_SET, offset, NULL,
    -                                  dims1, NULL);  
    -
    -    /* Write the data to the hyperslab  */
    -    status = H5Dwrite (dataset, H5T_NATIVE_INT, dataspace, filespace,
    -                       H5P_DEFAULT, data1);
    -
    -    /* Extend the dataset. Dataset becomes 10 x 3  */
    -    dims[0]   = dims1[0] + dims2[0];
    -    size[0]   = dims[0];  
    -    size[1]   = dims[1]; 
    -    status = H5Dextend (dataset, size);
    -
    -    /* Select a hyperslab  */
    -    filespace = H5Dget_space (dataset);
    -    offset[0] = 3;
    -    offset[1] = 0;
    -    status = H5Sselect_hyperslab (filespace, H5S_SELECT_SET, offset, NULL,
    -                                  dims2, NULL);  
    -
    -    /* Define memory space */
    -    dataspace = H5Screate_simple (RANK, dims2, NULL); 
    -
    -    /* Write the data to the hyperslab  */
    -    status = H5Dwrite (dataset, H5T_NATIVE_INT, dataspace, filespace,
    -                       H5P_DEFAULT, data2);
    -
    -    /* Close resources */
    -    status = H5Dclose (dataset);
    -    status = H5Sclose (dataspace);
    -    status = H5Sclose (filespace);
    -    status = H5Fclose (file);
    -
    -/****************************************************************
    -    Read the data back 
    - ***************************************************************/
    -
    -    file = H5Fopen (FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
    -    dataset = H5Dopen (file, DATASETNAME);
    -    filespace = H5Dget_space (dataset);
    -    rank = H5Sget_simple_extent_ndims (filespace);
    -    status_n = H5Sget_simple_extent_dims (filespace, dimsr, NULL);
    -
    -    cparms = H5Dget_create_plist (dataset);
    -    if (H5D_CHUNKED == H5Pget_layout (cparms))
    -    {
    -       rank_chunk = H5Pget_chunk (cparms, 2, chunk_dimsr);
    -    }
    -
    -    memspace = H5Screate_simple (rank,dimsr,NULL);
    -    status = H5Dread (dataset, H5T_NATIVE_INT, memspace, filespace,
    -                      H5P_DEFAULT, data_out);
    -    printf("\n");
    -    printf("Dataset: \n");
    -    for (j = 0; j < dimsr[0]; j++)
    -    {
    -       for (i = 0; i < dimsr[1]; i++)
    -           printf("%d ", data_out[j][i]);
    -       printf("\n");
    -    }
    +
    +NOTE: To download a tar file of the examples, including a Makefile,
    +please go to the References page.
     
    -    status = H5Pclose (cparms);
    -    status = H5Dclose (dataset);
    -    status = H5Sclose (filespace);
    -    status = H5Sclose (memspace);
    -    status = H5Fclose (file);
    -}     
    -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    -

    Remarks

      -
    • The function H5Pcreate creates a new property as an instance of - a property list. The signature of this function is as follows: -
      -  hid_t H5Pcreate ( H5P_class_t type )
      -
      +
    • The routine H5Pcreate / h5pcreate_f +creates a new property list as an instance of +a property list class. The signature is as follows: +

      +C: +

      +    hid_t H5Pcreate (H5P_class_t classtype)
      +
      +

      +FORTRAN: +

      +    h5pcreate_f (classtype, prp_id, hdferr) 
      +
      +            classtype  IN: INTEGER 
      +            prp_id    OUT: INTEGER(HID_T)
      +            hdferr    OUT: INTEGER 
      +
      +

        -
      • The parameter type is the type of property list to create.
        - The class types are: H5P_FILE_CREATE, H5P_FILE_ACCESS, H5P_DATASET_CREATE, - H5P_DATASET_XFER, and H5P_MOUNT +
      • The parameter classtype is the type of property list to create. + Valid class types are as follows: +
        + C                        FORTRAN
        + H5P_FILE_CREATE          H5P_FILE_CREATE_F
        + H5P_FILE_ACCESS          H5P_FILE_ACCESS_F
        + H5P_DATASET_CREATE       H5P_DATASET_CREATE_F
        + H5P_DATASET_XFER         H5P_DATASET_XFER_F
        + H5P_MOUNT                H5P_MOUNT_F
      • In C, the property list identifier is returned if successful; +otherwise a negative value is returned. +In FORTRAN, the property list identifier is returned in prp_id +and the return value for the call is returned in hdferr.

      -

    • The function H5Pset_chunk sets the size of the chunks used +
    • The routine H5Pset_chunk / h5pset_chunk_f +sets the size of the chunks used to store a chunked layout dataset. - The signature of this function is as follows: -
      -  herr_t H5Pset_chunk ( hid_t plist, int ndims, const hsize_t * dim ) 
      -
      + The signature of this routine is as follows: +

      +C: +

      +    herr_t H5Pset_chunk (hid_t prp_id, int ndims, 
      +                         const hsize_t * dims) 
      +
      +

      +FORTRAN: +

      +    h5pset_chunk_f (prp_id, ndims, dims, hdferr) 
      +
      +            prp_id    IN: INTEGER(HID_T)
      +            ndims     IN: INTEGER
      +            dims      IN: INTEGER(HSIZE_T), DIMENSION(ndims) 
      +            hdferr   OUT: INTEGER
      +
      +
      +

        -
      • The first parameter, plist, is the identifier for the property +
      • The prp_id parameter is the identifier for the property list to query. -
      • The second parameter, ndims, is the number of dimensions of +
      • The ndims parameter is the number of dimensions of each chunk. -
      • The third parameter, dim, is an array containing the size of +
      • The dims parameter is an array containing the size of each chunk. +
      • In C, a non-negative value is returned if successful; otherwise a + negative value is returned. + In FORTRAN, the return value is returned in hdferr: 0 if + successful and -1 otherwise.

      -A non-negative value is returned if successful; otherwise a negative -value is returned. -

      -

    • The function H5Dextend extends a dataset that has an unlimited +
    • The H5Dextend / h5dextend_f routine +extends a dataset that has an unlimited dimension. The signature is as follows: -
      -  herr_t H5Dextend ( hid_t dataset_id, const hsize_t * size ) 
      -
      +

      +C: +

      +    herr_t H5Dextend (hid_t dset_id, const hsize_t * size) 
      +
      +

      +FORTRAN: +

      +    h5dextend_f (dset_id, size, hdferr) 
      +
      +            dset_id   IN: INTEGER(HID_T) 
      +            size      IN: INTEGER(HSIZE_T), DIMENSION(*)  
      +            hdferr   OUT: INTEGER
      +
      +

        -
      • The first parameter, dataset_id, is the identifier of - the dataset. -
      • The second parater, size, is an array containing the +
      • The dset_id parameter is the dataset identifier. +
      • The size parameter is an array containing the new magnitude of each dimension. +
      • In C, this function returns a non-negative value if successful and + a negative value otherwise. + In FORTRAN, the return value is returned in hdferr: + 0 if successful and -1 otherwise.

      -This function returns a non-negative value if successful; otherwise -it returns a negative value. -

      -

    • The H5Dget_create_plist function returns an identifier for a +
    • The H5Dget_create_plist / h5dget_create_plist_f +routine returns an identifier for a copy of the dataset creation property list for a dataset.

      -

    • The H5Pget_layout function returns the layout of the raw data for a -dataset. Valid types are H5D_COMPACT, H5D_CONTIGUOUS, and H5D_CHUNKED. +
    • The C function, H5Pget_layout, returns the layout of the raw data for a +dataset. Valid types are H5D_CONTIGUOUS and +H5D_CHUNKED. +A FORTRAN routine for H5Pget_layout does not yet exist. +

      +

    • The H5Pget_chunk / h5pget_chunk_f +routine retrieves the size of chunks +for the raw data of a chunked layout dataset. +The signature is as follows: +

      +C: +

      +    int H5Pget_chunk (hid_t prp_id, int ndims, hsize_t * dims) 
      +
      +

      +FORTRAN: +

      +    h5pget_chunk_f (prp_id, ndims, dims, hdferr)
      +
      +            prp_id    IN: INTEGER(HID_T) 
      +            ndims     IN: INTEGER
      +            dims     OUT: INTEGER(HSIZE_T), DIMENSION(ndims) 
      +            hdferr   OUT: INTEGER 
      +

      -

    • The H5Pget_chunk function retrieves the size of chunks for the -raw data of a chunked layout dataset. -The signature of this function is: -
      -  int H5Pget_chunk ( hid_t plist, int max_ndims, hsize_t * dims ) 
      -
        -
      • The first parameter, plist, is the identifier of the + +
      • The prp_id parameter is the identifier of the property list to query. -
      • The second parameter, max_ndims, is the size of the dims +
      • The ndims parameter is the size of the dims array. -
      • The third parameter, dims, is the array to store the chunk - dimensions +
      • The dims parameter is the array in which to store the chunk + dimensions. +
      • In C, this function returns the chunk dimensionality if successful + and a negative value otherwise. + In FORTRAN, the return value is returned in hdferr: + the chunked rank if successful and -1 otherwise.

      -

    • The H5Pclose function terminates access to a property list. - The signature of this function is: -
      -  herr_t H5Pclose ( hid_t plist ) 
      -
      -where plist is the identifier of the property list to terminate -access to. +
    • The H5Pclose / h5pclose_f routine + terminates access to a property list. + The signature is as follows: +

      +C: +

      +    herr_t H5Pclose (hid_t prp_id) 
      +
      +

      +FORTRAN: +

      +    h5pclose_f (prp_id, hdferr) 
      +
      +            prp_id    IN: INTEGER(HID_T) 
      +            hdferr   OUT: INTEGER 
      +
      +

      +

        +
      • The prp_id parameter is the identifier of the property list + to terminate access to. +
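Taken together, the query routines above might be combined as in the following brief C sketch. This is not part of the distributed example; dataset here simply stands for an open dataset identifier, such as the one created earlier on this page.

    hsize_t chunk_dims[2];
    int     rank_chunk;

    /* Get a copy of the dataset creation property list */
    hid_t cparms = H5Dget_create_plist (dataset);

    /* If the layout is chunked, retrieve the chunk dimensions */
    if (H5D_CHUNKED == H5Pget_layout (cparms))
        rank_chunk = H5Pget_chunk (cparms, 2, chunk_dims);

    /* Terminate access to the property list copy */
    H5Pclose (cparms);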
    @@ -301,8 +272,9 @@ access to.
    hdfhelp@ncsa.uiuc.edu -
    Last Modified: August 27, 1999

    +
    Last Modified: January 19, 2000

    +
    diff --git a/doc/html/Tutor/fileorg.html b/doc/html/Tutor/fileorg.html index 03c2c29..4ed3a12 100644 --- a/doc/html/Tutor/fileorg.html +++ b/doc/html/Tutor/fileorg.html @@ -21,28 +21,28 @@ width=78 height=27 alt="NCSA">

    -An HDF5 file is a container for storing a variety of scientific data, and the -two primary HDF5 objects are groups and datasets. +An HDF5 file is a container for storing a variety of scientific data and +is composed of two primary types of objects: groups and datasets.

    • HDF5 group: a grouping structure containing zero or more HDF5 - objects, together with supporting metadata. + objects, together with supporting metadata
    • HDF5 dataset: a multidimensional array of data elements, together - with supporting metadata. + with supporting metadata
    -Any HDF5 group or dataset may have an associated attribute list. An HDF5 -attribute is a user-defined HDF5 structure that provides extra information +Any HDF5 group or dataset may have an associated attribute list. An HDF5 +attribute is a user-defined HDF5 structure that provides extra information about an HDF5 object.

    -Working with groups and group members (datasets for example) is similar in many +Working with groups and datasets is similar in many ways to working with directories and files in UNIX. As with UNIX directories -and files, objects in an HDF5 file are often described by giving their full (or -absolute) path names. +and files, an HDF5 object in an HDF5 file is often referred to by its +full path name (also called an absolute path name).

      / signifies the root group.
      - /foo signifies a member of the root group called foo. + /foo signifies a member of the root group called foo.
      - /foo/zoo signifies a member of the group foo, which in + /foo/zoo signifies a member of the group foo, which in turn is a member of the root group.

    @@ -90,8 +90,9 @@ The tutorial ends with a glossary and references. hdfhelp@ncsa.uiuc.edu -

    Last Modified: July 30, 1999

    +
    Last Modified: December 10, 1999

    +
    diff --git a/doc/html/Tutor/glossary.html b/doc/html/Tutor/glossary.html index 84a938d..edec3e2 100644 --- a/doc/html/Tutor/glossary.html +++ b/doc/html/Tutor/glossary.html @@ -44,8 +44,8 @@ number of entries in symbol tables (used to store groups) and additional version manner, called a complex dataspace.

    -

    DATA TYPE -
    An HDF5 Data Type is an object that describes the type of the +
    DATATYPE +
    An HDF5 Datatype is an object that describes the type of the element in an HDF5 multi-dimensional array. There are two categories of datatypes: atomic and compound data types. An atomic type is a type which cannot be decomposed into smaller @@ -181,6 +181,13 @@ A hyperslab is a portion of a dataset. A hyperslab selection can be a logically contiguous collection of points in a dataspace, or it can be a regular pattern of points or blocks in a dataspace.

    +

    MOUNTING FILES +
    +HDF5 allows you to combine two or more HDF5 files in a manner similar +to mounting file systems in UNIX. The group structure and metadata +from one file appear as though they exist in another file. +

    +

    NAMES
    HDF5 object names are a slash-separated list of components. A name which begins with a slash is an absolute name which is accessed @@ -192,6 +199,28 @@ can be a regular pattern of points or blocks in a dataspace. MPI (Message Passing Interface).

    +

    REFERENCE +
    +OBJECT REFERENCE:
    + A reference to an entire object in the current HDF5 file. +

    + An object + reference points to an entire object in the current HDF5 file by storing + the relative file address (OID) of the object header for the object + pointed to. The relative file address of an object header is constant + for the life of the object. An object reference is of a fixed size in + the file. +

    +DATASET REGION REFERENCE:
    + Reference to a specific dataset region. +

    + A dataset region reference points to a region of a dataset in the + current HDF5 file by storing the OID of the dataset and the global + heap offset of the region referenced. The region referenced is + located by retrieving the coordinates of the areas in the region + from the global heap. A dataset region reference is of a variable + size in the file. +

    THREADSAFE (HDF5)
    A "thread-safe" version of HDF-5 (TSHDF5) is one that can be called from any thread of a multi-threaded program. Any calls to HDF can be made in any order, and each individual HDF call will perform correctly. A calling program does not have to explicitly lock the HDF @@ -221,7 +250,7 @@ library as regular HDF-5 library, with additional code to synchronize access to hdfhelp@ncsa.uiuc.edu -
    Last Modified: September 1, 1999

    +
    Last Modified: January 5, 2000


    diff --git a/doc/html/Tutor/img001.gif b/doc/html/Tutor/img001.gif index b79c6d6..8ed766c 100644 Binary files a/doc/html/Tutor/img001.gif and b/doc/html/Tutor/img001.gif differ diff --git a/doc/html/Tutor/img002.gif b/doc/html/Tutor/img002.gif index 67585ef..e65f785 100644 Binary files a/doc/html/Tutor/img002.gif and b/doc/html/Tutor/img002.gif differ diff --git a/doc/html/Tutor/img003.gif b/doc/html/Tutor/img003.gif index ac1dcf9..0dec000 100644 Binary files a/doc/html/Tutor/img003.gif and b/doc/html/Tutor/img003.gif differ diff --git a/doc/html/Tutor/img004.gif b/doc/html/Tutor/img004.gif index d48dbab..26ff731 100644 Binary files a/doc/html/Tutor/img004.gif and b/doc/html/Tutor/img004.gif differ diff --git a/doc/html/Tutor/img005.gif b/doc/html/Tutor/img005.gif index 3383dc6..aa57847 100644 Binary files a/doc/html/Tutor/img005.gif and b/doc/html/Tutor/img005.gif differ diff --git a/doc/html/Tutor/intro.html b/doc/html/Tutor/intro.html index abf664c..31b5cc9 100644 --- a/doc/html/Tutor/intro.html +++ b/doc/html/Tutor/intro.html @@ -22,27 +22,39 @@ width=78 height=27 alt="NCSA">

    Welcome to the HDF5 Tutorial provided by the HDF User Support Group.

    HDF5 is a file format and library for storing scientific data. -HDF5 was designed and implemented to address the deficiencies of HDF4.x. -It has a more powerful and flexible data model, supports files larger than 2 GB, -supports parallel I/O, and is thread-safe. For a short overview presentation -of the HDF5 data model, library and tools see: +It was designed and implemented + to meet growing and ever-changing scientific data-storage + and data-handling needs, + to take advantage of the power and features of today's + computing systems, and + to address the deficiencies of HDF4.x. +HDF5 has a powerful and flexible data model, + supports files larger than 2 GB (the limit of HDF4.x files), and + supports parallel I/O. +Thread-safety has been designed and will be implemented in the near future. +For a short overview of the HDF5 data model, library, and tools, see +the slide presentation at the following URL:

    -   http://hdf.ncsa.uiuc.edu/HDF5/HDF5_overview/index.htm
    +   http://hdf.ncsa.uiuc.edu/HDF5/papers/HDF5_overview/index.htm
     
    This tutorial covers the basic HDF5 data objects and file structure, -the HDF5 programming model and the API functions necessary for creating and -modifying data objects. It also introduces the available HDF5 tools to access +the HDF5 programming model, and the API functions necessary for creating and +modifying data objects. It also introduces the available HDF5 tools for accessing HDF5 files.

    -The examples used in this tutorial, along with a Makefile to compile them +The examples used in this tutorial, along with a Makefile to compile them, can be found in ./examples/. You can also download a tar -file with the examples and Makefile. In -order to use the Makefile you may have to edit it and update the +file with the examples and Makefile. +To use the Makefile, you may have to edit it and update the compiler and compiler options, as well as the path for the HDF5 binary distribution. +The Java examples can be found in +a subdirectory of the ./examples/ directory called java/. The java/ +directory contains a Makefile and shell scripts for running the java +programs.

    -Please check the References for where to find +Please check the References for pointers to other examples of HDF5 Programs.

    We hope that the step-by-step examples and instructions will give you a quick @@ -68,8 +80,9 @@ Please send your comments and suggestions to hdfhelp@ncsa.uiuc.edu. hdfhelp@ncsa.uiuc.edu -

    Last Modified: October 8, 1999

    +
    Last Modified: April 5, 2000

    +
    diff --git a/doc/html/Tutor/iterate.html b/doc/html/Tutor/iterate.html index 4cb0475..6ba6349 100644 --- a/doc/html/Tutor/iterate.html +++ b/doc/html/Tutor/iterate.html @@ -21,11 +21,13 @@ width=78 height=27 alt="NCSA">

    Contents:

    +FORTRAN:
    -   H5Dwrite(dataset_id, mem_type_id, mem_space_id, file_space_id,
    -            xfer_plist_id, buf);
    +   CALL h5dread_f(dset_id, mem_type_id, buf, error, &
    +                     mem_space_id=mspace_id, file_space_id=fspace_id, &
    +                     xfer_prp=xfer_plist_id)
    +        or
    +   CALL h5dread_f(dset_id, mem_type_id, buf, error)
    +
    +
    +   CALL h5dwrite_f(dset_id, mem_type_id, buf, error, &
    +                     mem_space_id=mspace_id, file_space_id=fspace_id, &
    +                     xfer_prp=xfer_plist_id)
    +        or
    +   CALL h5dwrite_f(dset_id, mem_type_id, buf, error)
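For comparison, a minimal C sequence for the same write and read operations might look like the following sketch. The file name dset.h5 and the dataset name /dset follow the names used elsewhere in this tutorial; filling the data buffer and error checking are omitted.

    int   dset_data[4][6];      /* application data buffer */
    hid_t file_id, dataset_id;

    file_id    = H5Fopen ("dset.h5", H5F_ACC_RDWR, H5P_DEFAULT);
    dataset_id = H5Dopen (file_id, "/dset");

    /* Write the buffer to the dataset, then read it back */
    H5Dwrite (dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset_data);
    H5Dread  (dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset_data);

    H5Dclose (dataset_id);
    H5Fclose (file_id);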
     
    @@ -88,130 +108,241 @@ To read to/write from a dataset, the calling program must contain the following

    Description

    The following example shows how to read and write an existing dataset. It opens the file created in the previous example, obtains the dataset -identifier, -/dset, writes the dataset to the file, then reads the dataset back from +identifier for the dataset /dset, +writes the dataset to the file, then reads the dataset back from the file into memory. It then closes the dataset and file.
    -[
    Download h5_rdwt.c ] - -
    -
    -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    -
    -#include <hdf5.h>
    -#define FILE "dset.h5"
    -
    -main() {
    -
    -   hid_t       file_id, dataset_id;  /* identifiers */
    -   herr_t      status;
    -   int         i, j, dset_data[4][6];
    -
    -   /* Initialize the dataset. */
    -   for (i = 0; i < 4; i++)
    -      for (j = 0; j < 6; j++)
    -         dset_data[i][j] = i * 6 + j + 1;
    -
    -   /* Open an existing file. */
    -   file_id = H5Fopen(FILE, H5F_ACC_RDWR, H5P_DEFAULT);
    -
    -   /* Open an existing dataset. */
    -
    -   dataset_id = H5Dopen(file_id, "/dset");
    -
    -   /* Write the dataset. */
    -   status = H5Dwrite(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
    -                     dset_data);
    -
    -   status = H5Dread(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
    -                    dset_data);
    +
     
    -   /* Close the dataset. */
    -   status = H5Dclose(dataset_id);
    +NOTE: To download a tar file of the examples, including a Makefile,
    +please go to the References page.
     
    -   /* Close the file. */
    -   status = H5Fclose(file_id);
    -}
    -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
     

    Remarks

    + +

    File Contents

    +

    +HDF5 File Created by C Example: +

    +Fig. A   REF_REG.h5 in DDL

    -HDF5 "trefer2.h5" {
    +
    +HDF5 "REF_REG.h5" {
     GROUP "/" {
    -   DATASET "Dataset1" {
    +   DATASET "MATRIX" {
    +      DATATYPE { H5T_STD_I32BE }
    +      DATASPACE { SIMPLE ( 2, 9 ) / ( 2, 9 ) }
    +      DATA {
    +         1, 1, 2, 3, 3, 4, 5, 5, 6,
    +         1, 2, 2, 3, 4, 4, 5, 6, 6
    +      }
    +   }
    +   DATASET "REGION_REFERENCES" {
           DATATYPE { H5T_REFERENCE }
    -      DATASPACE { SIMPLE ( 4 ) / ( 4 ) }
    +      DATASPACE { SIMPLE ( 2 ) / ( 2 ) }
           DATA {
    -         DATASET 0:744 {(2,2)-(7,7)}, DATASET 0:744 {(6,9), (2,2), (8,4), (1,6),
    -          (2,8), (3,2), (0,4), (9,0), (7,1), (3,3)}, NULL, NULL
    +         DATASET 0:744 {(0,3)-(1,5)}, DATASET 0:744 {(0,0), (1,6), (0,8)}
           }
        }
    -   DATASET "Dataset2" {
    -      DATATYPE { H5T_STD_U8LE }
    -      DATASPACE { SIMPLE ( 10, 10 ) / ( 10, 10 ) }
    +}
    +}
    +
    +
    +HDF5 File Created by FORTRAN Example: +

    +Fig. B   FORTRAN.h5 in DDL +

    +
    +HDF5 "FORTRAN.h5" {
    +GROUP "/" {
    +   DATASET "MATRIX" {
    +      DATATYPE { H5T_STD_I32BE }
    +      DATASPACE { SIMPLE ( 9, 2 ) / ( 9, 2 ) }
           DATA {
    -         0, 3, 6, 9, 12, 15, 18, 21, 24, 27,
    -         30, 33, 36, 39, 42, 45, 48, 51, 54, 57,
    -         60, 63, 66, 69, 72, 75, 78, 81, 84, 87,
    -         90, 93, 96, 99, 102, 105, 108, 111, 114, 117,
    -         120, 123, 126, 129, 132, 135, 138, 141, 144, 147,
    -         150, 153, 156, 159, 162, 165, 168, 171, 174, 177,
    -         180, 183, 186, 189, 192, 195, 198, 201, 204, 207,
    -         210, 213, 216, 219, 222, 225, 228, 231, 234, 237,
    -         240, 243, 246, 249, 252, 255, 255, 255, 255, 255,
    -         255, 255, 255, 255, 255, 255, 255, 255, 255, 255
    +         1, 1,
    +         1, 2,
    +         2, 2,
    +         3, 3,
    +         3, 4,
    +         4, 4,
    +         5, 5,
    +         5, 6,
    +         6, 6
    +      }
    +   }
    +   DATASET "REGION_REFERENCES" {
    +      DATATYPE { H5T_REFERENCE }
    +      DATASPACE { SIMPLE ( 2 ) / ( 2 ) }
    +      DATA {
    +         DATASET 0:744 {(3,0)-(5,1)}, DATASET 0:744 {(0,0), (6,1), (8,0)}
           }
        }
     }
     }
     
    -Notice how raw data of the dataset with the dataset regions is displayed. + +Notice how the raw data in the dataset containing the region references is displayed. Each element of the raw data consists of a reference to the dataset (DATASET number1:number2) and its selected region. If the selection is a hyperslab, the corner coordinates of the hyperslab are displayed. For a point selection, the coordinates of each point are displayed. + @@ -532,8 +354,9 @@ Output of this program is :
    hdfhelp@ncsa.uiuc.edu -
    Last Modified: August 27, 1999

    +
    Last Modified: January 19, 2000

    +
    diff --git a/doc/html/Tutor/select.html b/doc/html/Tutor/select.html index 76ef846..459043d 100644 --- a/doc/html/Tutor/select.html +++ b/doc/html/Tutor/select.html @@ -1,5 +1,5 @@ -HDF5 Tutorial - Selections using H5Sselect_hyperslab +<TITLE>HDF5 Tutorial - Hyperslab Selections @@ -13,7 +13,7 @@ width=78 height=27 alt="NCSA">

    [ HDF5 Tutorial Top ]

    -Selections using H5Sselect_hyperslab +Hyperslab Selections


    @@ -34,18 +34,21 @@ width=78 height=27 alt="NCSA">


    Selecting a Portion of a Dataspace

    -Hyperslabs are portions of datasets. A hyperslab selection can be a logically contiguous collection of points in a dataspace, or it +Hyperslabs are portions of datasets. A hyperslab selection can be a +logically contiguous collection of points in a dataspace, or it can be a regular pattern of points or blocks in a dataspace. -You can select a hyperslab to write to/read from with the function -H5Sselect_hyperslab. +You can select a hyperslab to write to or read from with the function +H5Sselect_hyperslab / h5sselect_hyperslab_f.

    Programming Example

    Description

    -This example creates a 5 x 6 integer array in a -file called sds.h5. It selects a 3 x 4 hyperslab from the dataset, -as follows (Dimension 0 is offset by 1 and Dimension 1 is offset by 2): +This example creates a 5 x 6 integer array in a file called sds.h5 +(sdsf.h5 in FORTRAN). It +selects a 3 x 4 hyperslab from the dataset as follows (Dimension 0 is +offset by 1 and Dimension 1 is offset by 2):

    +5 x 6 array: @@ -171,245 +174,100 @@ follows (with Dimension 0 offset by 3):
       

    -[ Download h5_hyperslab.c ] -

     
    -/************************************************************
    -  
    -  This example shows how to write and read a hyperslab.  It 
    -  is derived from the h5_read.c and h5_write.c examples in 
    -  the "Introduction to HDF5".
    -
    - ************************************************************/
    - 
    -#include "hdf5.h"
    -
    -#define FILE        "sds.h5"
    -#define DATASETNAME "IntArray" 
    -#define NX_SUB  3                      /* hyperslab dimensions */ 
    -#define NY_SUB  4 
    -#define NX 7                           /* output buffer dimensions */ 
    -#define NY 7 
    -#define NZ  3 
    -#define RANK         2
    -#define RANK_OUT     3
    -
    -#define X     5                        /* dataset dimensions */
    -#define Y     6
    -
    -int
    -main (void)
    -{
    -    hsize_t     dimsf[2];              /* dataset dimensions */
    -    int         data[X][Y];            /* data to write */
    -
    -    /* 
    -     * Data  and output buffer initialization. 
    -     */
    -    hid_t       file, dataset;         /* handles */
    -    hid_t       dataspace;   
    -    hid_t       memspace; 
    -    hsize_t     dimsm[3];              /* memory space dimensions */
    -    hsize_t     dims_out[2];           /* dataset dimensions */      
    -    herr_t      status;                             
    -
    -    int         data_out[NX][NY][NZ ]; /* output buffer */
    -   
    -    hsize_t     count[2];              /* size of the hyperslab in the file */
    -    hssize_t    offset[2];             /* hyperslab offset in the file */
    -    hsize_t     count_out[3];          /* size of the hyperslab in memory */
    -    hssize_t    offset_out[3];         /* hyperslab offset in memory */
    -    int         i, j, k, status_n, rank;
    -
    -
    -
    -/*********************************************************  
    -   This writes data to the HDF5 file.  
    - *********************************************************/  
    - 
    -    /* 
    -     * Data  and output buffer initialization. 
    -     */
    -    for (j = 0; j < X; j++) {
    -	for (i = 0; i < Y; i++)
    -	    data[j][i] = i + j;
    -    }     
    -    /*
    -     * 0 1 2 3 4 5 
    -     * 1 2 3 4 5 6
    -     * 2 3 4 5 6 7
    -     * 3 4 5 6 7 8
    -     * 4 5 6 7 8 9
    -     */
    -
    -    /*
    -     * Create a new file using H5F_ACC_TRUNC access,
    -     * the default file creation properties, and the default file
    -     * access properties.
    -     */
    -    file = H5Fcreate (FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    -
    -    /*
    -     * Describe the size of the array and create the data space for fixed
    -     * size dataset. 
    -     */
    -    dimsf[0] = X;
    -    dimsf[1] = Y;
    -    dataspace = H5Screate_simple (RANK, dimsf, NULL); 
    -
    -    /*
    -     * Create a new dataset within the file using defined dataspace and
    -     * default dataset creation properties.
    -     */
    -    dataset = H5Dcreate (file, DATASETNAME, H5T_STD_I32BE, dataspace,
    -                         H5P_DEFAULT);
    -
    -    /*
    -     * Write the data to the dataset using default transfer properties.
    -     */
    -    status = H5Dwrite (dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
    -                      H5P_DEFAULT, data);
    -
    -    /*
    -     * Close/release resources.
    -     */
    -    H5Sclose (dataspace);
    -    H5Dclose (dataset);
    -    H5Fclose (file);
    - 
    -
    -/*************************************************************  
    -
    -  This reads the hyperslab from the sds.h5 file just 
    -  created, into a 2-dimensional plane of the 3-dimensional 
    -  array.
    -
    - ************************************************************/  
    -
    -    for (j = 0; j < NX; j++) {
    -	for (i = 0; i < NY; i++) {
    -	    for (k = 0; k < NZ ; k++)
    -		data_out[j][i][k] = 0;
    -	}
    -    } 
    - 
    -    /*
    -     * Open the file and the dataset.
    -     */
    -    file = H5Fopen (FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
    -    dataset = H5Dopen (file, DATASETNAME);
    -
    -    dataspace = H5Dget_space (dataset);    /* dataspace handle */
    -    rank      = H5Sget_simple_extent_ndims (dataspace);
    -    status_n  = H5Sget_simple_extent_dims (dataspace, dims_out, NULL);
    -    printf("\nRank: %d\nDimensions: %lu x %lu \n", rank,
    -	   (unsigned long)(dims_out[0]), (unsigned long)(dims_out[1]));
    -
    -    /* 
    -     * Define hyperslab in the dataset. 
    -     */
    -    offset[0] = 1;
    -    offset[1] = 2;
    -    count[0]  = NX_SUB;
    -    count[1]  = NY_SUB;
    -    status = H5Sselect_hyperslab (dataspace, H5S_SELECT_SET, offset, NULL, 
    -                                  count, NULL);
    -
    -    /*
    -     * Define the memory dataspace.
    -     */
    -    dimsm[0] = NX;
    -    dimsm[1] = NY;
    -    dimsm[2] = NZ;
    -    memspace = H5Screate_simple (RANK_OUT, dimsm, NULL);   
    -
    -    /* 
    -     * Define memory hyperslab. 
    -     */
    -    offset_out[0] = 3;
    -    offset_out[1] = 0;
    -    offset_out[2] = 0;
    -    count_out[0]  = NX_SUB;
    -    count_out[1]  = NY_SUB;
    -    count_out[2]  = 1;
    -    status = H5Sselect_hyperslab (memspace, H5S_SELECT_SET, offset_out, NULL, 
    -                                  count_out, NULL);
    -
    -    /*
    -     * Read data from hyperslab in the file into the hyperslab in 
    -     * memory and display.
    -     */
    -    status = H5Dread (dataset, H5T_NATIVE_INT, memspace, dataspace,
    -                      H5P_DEFAULT, data_out);
    -    printf ("Data:\n ");
    -    for (j = 0; j < NX; j++) {
    -	for (i = 0; i < NY; i++) printf("%d ", data_out[j][i][0]);
    -	printf("\n ");
    -    }
    -	printf("\n");
    -    /*
    -     * 0 0 0 0 0 0 0
    -     * 0 0 0 0 0 0 0
    -     * 0 0 0 0 0 0 0
    -     * 3 4 5 6 0 0 0  
    -     * 4 5 6 7 0 0 0
    -     * 5 6 7 8 0 0 0
    -     * 0 0 0 0 0 0 0
    -     */
    -
    -    /*
    -     * Close and release resources.
    -     */
    -    H5Dclose (dataset);
    -    H5Sclose (dataspace);
    -    H5Sclose (memspace);
    -    H5Fclose (file);
    +To obtain the example, download:
    +
    +NOTE: To download a tar file of the examples, including a Makefile,
    +please go to the References page.
    +

    -} -

    Remarks

      -
    • H5Sselect_hyperslab selects a hyperslab region to add to the current -selected region for a specified dataspace. -
      -  herr_t H5Sselect_hyperslab (hid_t space_id, H5S_seloper_t op, 
      -         const hssize_t *start, const hsize_t *stride, 
      -         const hsize_t *count, const hsize_t *block ) 
      -
      +
    • H5Sselect_hyperslab / h5sselect_hyperslab_f +selects a hyperslab region to +add to the current selected region for a specified dataspace. +

      +C: +

      +    herr_t H5Sselect_hyperslab (hid_t space_id, H5S_seloper_t operator,
      +        const hssize_t *start, const hsize_t *stride,
      +        const hsize_t *count, const hsize_t *block ) 
      +
      +

      +FORTRAN: +

      +    h5sselect_hyperslab_f (space_id, operator, start, count, &
      +                           hdferr, stride, block)
      +
      +            space_id    IN: INTEGER(HID_T) 
      +            operator    IN: INTEGER 
      +            start       IN: INTEGER(HSSIZE_T), DIMENSION(*)
      +            count       IN: INTEGER(HSIZE_T), DIMENSION(*)
      +            hdferr     OUT: INTEGER
      +            stride      IN: INTEGER(HSIZE_T), DIMENSION(*), OPTIONAL
      +            block       IN: INTEGER(HSIZE_T), DIMENSION(*), OPTIONAL 
      +
      +

        -
      • The first parameter, space_id, is the dataspace identifier for the +
      • The parameter space_id is the dataspace identifier for the specified dataspace. -
      • The second parameter, op, can only be set to H5S_SELECT_SET - in the current release. It replaces the existing selection with the - parameters from this call. Overlapping blocks are not supported. -
      • The start array determines the starting coordinates of the hyperslab to select. +

        +

      • The parameter operator can be set to one of the following: +
        +
        H5S_SELECT_SET (H5S_SELECT_SET_F in FORTRAN) +
        Replace the existing selection with the parameters from this call. + Overlapping blocks are not supported with this operator. + +
        H5S_SELECT_OR (H5S_SELECT_OR_F in FORTRAN) +
        Add the new selection to the existing selection. +
        + +

        +

      • The start array determines the starting coordinates of the +hyperslab to select. +

      • The stride array indicates which elements along a dimension are to be selected. +

      • The count array determines how many positions to select from the dataspace in each dimension. +

      • The block array determines the size of the element block selected by the dataspace. +

        +

      • In C, a non-negative value is returned if successful, and a negative +value otherwise. In FORTRAN, the return value is returned in hdferr: +0 if successful and -1 otherwise.

      The start, stride, count, and block arrays must be the same size as the rank of the dataspace.
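As an illustration, the 3 x 4 selection described above (offset 1 in Dimension 0 and offset 2 in Dimension 1) could be made in C roughly as follows. This is a sketch only, where dataspace stands for the dataspace of the 5 x 6 dataset; a further call with H5S_SELECT_OR would add another block to the same selection.

    hssize_t offset[2] = {1, 2};   /* starting coordinates of the hyperslab */
    hsize_t  count[2]  = {3, 4};   /* number of positions to select         */

    /* stride and block are passed as NULL, so both default to 1 */
    H5Sselect_hyperslab (dataspace, H5S_SELECT_SET, offset, NULL, count, NULL);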

      -

    • This example introduces the following H5Dget_* functions: -
        - H5Dget_space: returns an identifier for a copy of the dataspace - of a dataset.
        - H5Dget_type: returns an identifier for a copy of the data type - of a dataset.
        -
      +
    • The examples introduce the following call: +
      +
      H5Dget_space / h5dget_space_f: +
      Returns an identifier for a copy of the dataspace of a dataset.

      +

      +
    • The C example also introduces the following calls: +
      +
      H5Sget_simple_extent_dims: +
      Returns the size and maximum size of each dimension of a dataspace. +
      H5Sget_simple_extent_ndims: +
      Determines the dimensionality (or rank) of a dataspace. +

      -

    • This example introduces the following H5Sget_* functions used to -obtain information about selections: -
        - H5Sget_simple_extent_dims: returns the size and maximum sizes - of each dimension of a dataspace.
        - H5Sget_simple_extent_ndims: determines the dimensionality - (or rank) of a dataspace.
        +The FORTRAN example does not use these calls, though they +are available as h5sget_simple_extent_dims_f and +h5sget_simple_extent_ndims_f. +
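A typical C usage of these query calls, sketched here with dataset standing for an open dataset identifier, is:

    hsize_t dims_out[2];

    hid_t dataspace = H5Dget_space (dataset);                  /* copy of the dataspace   */
    int   rank      = H5Sget_simple_extent_ndims (dataspace);  /* number of dimensions    */
    H5Sget_simple_extent_dims (dataspace, dims_out, NULL);     /* current dimension sizes */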
    @@ -439,8 +297,9 @@ obtain information about selections:
    hdfhelp@ncsa.uiuc.edu -
    Last Modified: August 27, 1999

    +
    Last Modified: April 5, 2000

    +
    diff --git a/doc/html/Tutor/selectc.html b/doc/html/Tutor/selectc.html index e01d9db..dec7b1f 100644 --- a/doc/html/Tutor/selectc.html +++ b/doc/html/Tutor/selectc.html @@ -1,5 +1,5 @@ -HDF5 Tutorial - Selections using H5Sselect_elements and H5Scopy +<TITLE>HDF5 Tutorial - Selecting Individual Points and Copying a Dataspace @@ -13,7 +13,8 @@ width=78 height=27 alt="NCSA">

    [ HDF5 Tutorial Top ]

    -Selections using H5Sselect_elements and H5SCopy +Selecting Individual Points and Copying +a Dataspace


    @@ -21,7 +22,7 @@ width=78 height=27 alt="NCSA">

    Contents:

    @@ -231,42 +160,83 @@ by the number of elements to be selected, num_elem.

    File Contents

    Following is the DDL for copy1.h5 and copy2.h5, as viewed with -the commands "h5dump copy1.h5" and "h5dump copy2.h5". +the following commands:
    +             +h5dump copy1.h5
    +             +h5dump copy2.h5 +

    -Fig. S.1   'copy1.h5' in DDL +


    +C:

    +Fig. S.1a   copy1.h5 in DDL

    -HDF5 "copy1.h5" {
    -GROUP "/" {
    -   DATASET "Copy1" {
    -      DATATYPE { H5T_STD_I32BE }
    -      DATASPACE { SIMPLE ( 3, 4 ) / ( 3, 4 ) }
    -      DATA {
    -         0, 59, 0, 53,
    -         0, 0, 0, 0,
    -         0, 0, 0, 0
    +   HDF5 "copy1.h5" {
    +   GROUP "/" {
    +      DATASET "Copy1" {
    +         DATATYPE { H5T_STD_I32BE }
    +         DATASPACE { SIMPLE ( 3, 4 ) / ( 3, 4 ) }
    +         DATA {
    +            0, 59, 0, 53,
    +            0, 0, 0, 0,
    +            0, 0, 0, 0
    +         }
           }
        }
    -}
    -}
    +   }
     
    -Fig. S.2   'copy2.h5' in DDL +Fig. S.1b   copy2.h5 in DDL
    -HDF5 "copy2.h5" {
    -GROUP "/" {
    -   DATASET "Copy2" {
    -      DATATYPE { H5T_STD_I32BE }
    -      DATASPACE { SIMPLE ( 3, 4 ) / ( 3, 4 ) }
    -      DATA {
    -         1, 59, 1, 53,
    -         1, 1, 1, 1,
    -         1, 1, 1, 1
    +   HDF5 "copy2.h5" {
    +   GROUP "/" {
    +      DATASET "Copy2" {
    +         DATATYPE { H5T_STD_I32BE }
    +         DATASPACE { SIMPLE ( 3, 4 ) / ( 3, 4 ) }
    +         DATA {
    +            1, 59, 1, 53,
    +            1, 1, 1, 1,
    +            1, 1, 1, 1
    +         }
           }
        }
    -}
    -}
    -
    +   }
    +
    +
    +FORTRAN:

    +Fig. S.2a   copy1.h5 in DDL +

    +   HDF5 "copy1.h5" {
    +   GROUP "/" {
    +      DATASET "Copy1" {
    +         DATATYPE { H5T_STD_I32BE }
    +         DATASPACE { SIMPLE ( 4, 3 ) / ( 4, 3 ) }
    +         DATA {
    +            0, 0, 0,
    +            53, 0, 0,
    +            0, 0, 0,
    +            59, 0, 0
    +         }
    +      }
    +   }
    +   }
    +
    +Fig. S.2b   copy2.h5 in DDL +
    +   HDF5 "copy2.h5" {
    +   GROUP "/" {
    +      DATASET "Copy2" {
    +         DATATYPE { H5T_STD_I32BE }
    +         DATASPACE { SIMPLE ( 4, 3 ) / ( 4, 3 ) }
    +         DATA {
    +            1, 1, 1,
    +            53, 1, 1,
    +            1, 1, 1,
    +            59, 1, 1
    +         }
    +      }
    +   }
    +   }
     
    - @@ -283,8 +253,9 @@ GROUP "/" {
    hdfhelp@ncsa.uiuc.edu -
    Last Modified: August 27, 1999

    +
    Last Modified: April 5, 2000

    +
    diff --git a/doc/html/Tutor/title.html b/doc/html/Tutor/title.html index 35283b7..36f724a 100644 --- a/doc/html/Tutor/title.html +++ b/doc/html/Tutor/title.html @@ -19,8 +19,6 @@ width=78 height=27 alt="NCSA">


    - - + +
    +NOTE:   +This tutorial does NOT include the software needed to compile the +examples. You will need to obtain this first:

    +

      + C: +
        Obtain the HDF5 library. We provide pre-compiled binaries for the +platforms on which we tested at +ftp://ftp.ncsa.uiuc.edu/HDF/HDF5/current/bin. +If you use the pre-compiled binaries, you must +also obtain the GZIP library; the binaries were compiled with GZIP support but do +not include the GZIP library itself. We provide the GZIP library for the platforms on +which we +tested at:  +ftp://ftp.ncsa.uiuc.edu/HDF/gzip/ +
      +

      + FORTRAN90: +

      +
      + Java:    You will need the JHI5 code. Go to the +Java HDF5 web page +for more details. + +
    +
    +

    Contents:

    Introductory Topics

      @@ -42,11 +78,11 @@ width=78 height=27 alt="NCSA">

    1. The HDF5 API
    2. Creating an HDF5 File
    3. Creating a Dataset -
    4. Reading from/Writing to a Dataset +
    4. Reading from or Writing to a Dataset
    5. Creating an Attribute
    6. Creating a Group -
    7. Creating Groups using Absolute/Relative -Names +
    7. Creating Groups Using Absolute and + Relative Names
    8. Creating Datasets in Groups
      @@ -55,9 +91,9 @@ Names

    Advanced Topics