author     cvs2svn <no_author@cvs2svn>  2001-03-14 04:42:20 (GMT)
committer  cvs2svn <no_author@cvs2svn>  2001-03-14 04:42:20 (GMT)
commit     1c47477b05ee7190c77d63b3f35e6d5b1721b98f (patch)
tree       aaa61b06d1677c5eb4ff2079592f5b84011a4e5b /doc/html
parent     c5adbd01ac08f77388ba0ecbac523181afd9ab35 (diff)
[svn-r3621] This commit was manufactured by cvs2svn to create branch 'hdf5_1_4'.
Diffstat (limited to 'doc/html')
-rw-r--r--  doc/html/Tutor/examples/attrexample.f90                     87
-rw-r--r--  doc/html/Tutor/examples/chunk.f90                          310
-rw-r--r--  doc/html/Tutor/examples/compound.f90                       215
-rw-r--r--  doc/html/Tutor/examples/dsetexample.f90                     70
-rw-r--r--  doc/html/Tutor/examples/fileexample.f90                     34
-rw-r--r--  doc/html/Tutor/examples/groupexample.f90                    49
-rw-r--r--  doc/html/Tutor/examples/grpdsetexample.f90                 136
-rw-r--r--  doc/html/Tutor/examples/grpit.f90                          194
-rw-r--r--  doc/html/Tutor/examples/grpsexample.f90                     68
-rw-r--r--  doc/html/Tutor/examples/hyperslab.f90                      199
-rw-r--r--  doc/html/Tutor/examples/java/Compound.java                 540
-rw-r--r--  doc/html/Tutor/examples/java/Copy.java                     541
-rw-r--r--  doc/html/Tutor/examples/java/CreateAttribute.java          302
-rw-r--r--  doc/html/Tutor/examples/java/CreateDataset.java            210
-rw-r--r--  doc/html/Tutor/examples/java/CreateFile.java                83
-rw-r--r--  doc/html/Tutor/examples/java/CreateFileInput.java          118
-rw-r--r--  doc/html/Tutor/examples/java/CreateGroup.java              139
-rw-r--r--  doc/html/Tutor/examples/java/CreateGroupAR.java            152
-rw-r--r--  doc/html/Tutor/examples/java/CreateGroupDataset.java       340
-rw-r--r--  doc/html/Tutor/examples/java/DatasetRdWt.java              213
-rw-r--r--  doc/html/Tutor/examples/java/HyperSlab.java                590
-rw-r--r--  doc/html/Tutor/examples/java/Makefile                       92
-rw-r--r--  doc/html/Tutor/examples/java/README                         21
-rw-r--r--  doc/html/Tutor/examples/java/readme.html                   192
-rw-r--r--  doc/html/Tutor/examples/java/runCompound.sh                 17
-rw-r--r--  doc/html/Tutor/examples/java/runCompound.sh.in              17
-rw-r--r--  doc/html/Tutor/examples/java/runCopy.sh                     17
-rw-r--r--  doc/html/Tutor/examples/java/runCopy.sh.in                  17
-rw-r--r--  doc/html/Tutor/examples/java/runCreateAttribute.sh          17
-rw-r--r--  doc/html/Tutor/examples/java/runCreateAttribute.sh.in       17
-rw-r--r--  doc/html/Tutor/examples/java/runCreateDataset.sh            17
-rw-r--r--  doc/html/Tutor/examples/java/runCreateDataset.sh.in         17
-rw-r--r--  doc/html/Tutor/examples/java/runCreateFile.sh               17
-rw-r--r--  doc/html/Tutor/examples/java/runCreateFile.sh.in            17
-rw-r--r--  doc/html/Tutor/examples/java/runCreateFileInput.sh          17
-rw-r--r--  doc/html/Tutor/examples/java/runCreateFileInput.sh.in       17
-rw-r--r--  doc/html/Tutor/examples/java/runCreateGroup.sh              17
-rw-r--r--  doc/html/Tutor/examples/java/runCreateGroup.sh.in           17
-rw-r--r--  doc/html/Tutor/examples/java/runCreateGroupAR.sh            17
-rw-r--r--  doc/html/Tutor/examples/java/runCreateGroupAR.sh.in         17
-rw-r--r--  doc/html/Tutor/examples/java/runCreateGroupDataset.sh       17
-rw-r--r--  doc/html/Tutor/examples/java/runCreateGroupDataset.sh.in    17
-rw-r--r--  doc/html/Tutor/examples/java/runDatasetRdWt.sh              17
-rw-r--r--  doc/html/Tutor/examples/java/runDatasetRdWt.sh.in           17
-rw-r--r--  doc/html/Tutor/examples/java/runHyperSlab.sh                17
-rw-r--r--  doc/html/Tutor/examples/java/runHyperSlab.sh.in             17
-rw-r--r--  doc/html/Tutor/examples/mountexample.f90                   187
-rw-r--r--  doc/html/Tutor/examples/refobjexample.f90                  142
-rw-r--r--  doc/html/Tutor/examples/refregexample.f90                  162
-rw-r--r--  doc/html/Tutor/examples/rwdsetexample.f90                   78
-rw-r--r--  doc/html/Tutor/examples/selectele.f90                      282
-rw-r--r--  doc/html/Tutor/property.html                               169
-rw-r--r--  doc/html/Tutor/software.html                                88
53 files changed, 6377 insertions, 0 deletions
diff --git a/doc/html/Tutor/examples/attrexample.f90 b/doc/html/Tutor/examples/attrexample.f90
new file mode 100644
index 0000000..9198eb8
--- /dev/null
+++ b/doc/html/Tutor/examples/attrexample.f90
@@ -0,0 +1,87 @@
+! This example shows how to create and write a dataset attribute.
+! It opens the existing file 'dsetf.h5', obtains the identifier of
+! the dataset "/dset", defines the attribute's dataspace,
+! creates the dataset attribute, writes the attribute, and then closes
+! the attribute's dataspace, the attribute, the dataset, and the file.
+
+ PROGRAM ATTREXAMPLE
+
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+
+ CHARACTER(LEN=8), PARAMETER :: filename = "dsetf.h5" ! File name
+ CHARACTER(LEN=4), PARAMETER :: dsetname = "dset" ! Dataset name
+ CHARACTER(LEN=4), PARAMETER :: aname = "attr" ! Attribute name
+
+ INTEGER(HID_T) :: file_id ! File identifier
+ INTEGER(HID_T) :: dset_id ! Dataset identifier
+ INTEGER(HID_T) :: attr_id ! Attribute identifier
+ INTEGER(HID_T) :: aspace_id ! Attribute Dataspace identifier
+
+ INTEGER(HSIZE_T), DIMENSION(1) :: adims = (/2/) ! Attribute dimension
+ INTEGER, DIMENSION(2) :: attr_data = (/100,200/)! Attribute data
+ INTEGER :: arank = 1 ! Attribute rank
+
+ INTEGER :: error ! Error flag
+
+
+ !
+ ! Initialize FORTRAN predefined datatypes.
+ !
+ CALL h5open_f(error)
+
+ !
+ ! Open an existing file.
+ !
+ CALL h5fopen_f (filename, H5F_ACC_RDWR_F, file_id, error)
+
+ !
+ ! Open an existing dataset.
+ !
+ CALL h5dopen_f(file_id, dsetname, dset_id, error)
+
+ !
+ ! Create the data space for the attribute.
+ !
+ CALL h5screate_simple_f(arank, adims, aspace_id, error)
+
+ !
+ ! Create dataset attribute.
+ !
+ CALL h5acreate_f(dset_id, aname, H5T_NATIVE_INTEGER,aspace_id, &
+ attr_id, error)
+
+ !
+ ! Write the attribute data.
+ !
+ CALL h5awrite_f(attr_id, H5T_NATIVE_INTEGER, attr_data, error)
+
+ !
+ ! Close the attribute.
+ !
+ CALL h5aclose_f(attr_id, error)
+
+ !
+ ! Terminate access to the data space.
+ !
+ CALL h5sclose_f(aspace_id, error)
+
+ !
+ ! End access to the dataset and release resources used by it.
+ !
+ CALL h5dclose_f(dset_id, error)
+
+ !
+ ! Close the file.
+ !
+ CALL h5fclose_f(file_id, error)
+
+ !
+ ! Close FORTRAN predefined datatypes.
+ !
+ CALL h5close_f(error)
+
+ END PROGRAM ATTREXAMPLE
+
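A minimal sketch of the reverse operation, reusing the declarations from ATTREXAMPLE above: the attribute can be reopened on the dataset and read back. This assumes the HDF5 1.4-era Fortran calls h5aopen_name_f and h5aread_f, with h5aread_f following the same argument pattern as the h5awrite_f call above (later HDF5 releases also take a dims argument).

    ! Sketch: reopen and read back the attribute written above.
    CALL h5dopen_f(file_id, dsetname, dset_id, error)
    CALL h5aopen_name_f(dset_id, aname, attr_id, error)
    CALL h5aread_f(attr_id, H5T_NATIVE_INTEGER, attr_data, error)
    CALL h5aclose_f(attr_id, error)
    CALL h5dclose_f(dset_id, error)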
diff --git a/doc/html/Tutor/examples/chunk.f90 b/doc/html/Tutor/examples/chunk.f90
new file mode 100644
index 0000000..2810b5c
--- /dev/null
+++ b/doc/html/Tutor/examples/chunk.f90
@@ -0,0 +1,310 @@
+!
+!This example shows how to work with extendible datasets.
+!It creates a 3 x 3 extendible dataset, writes to that dataset,
+!extends the dataset to 10 x 3, and writes to the dataset again.
+!
+
+
+
+
+ PROGRAM CHUNKEXAMPLE
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+
+ !
+ !the dataset is stored in file "extf.h5"
+ !
+ CHARACTER(LEN=7), PARAMETER :: filename = "extf.h5"
+
+ !
+ !dataset name is "ExtendibleArray"
+ !
+ CHARACTER(LEN=15), PARAMETER :: dsetname = "ExtendibleArray"
+
+ !
+ !dataset rank is 2
+ !
+ INTEGER :: RANK = 2
+
+ INTEGER(HID_T) :: file_id ! File identifier
+ INTEGER(HID_T) :: dset_id ! Dataset identifier
+ INTEGER(HID_T) :: dataspace ! Dataspace identifier
+ INTEGER(HID_T) :: filespace ! Dataspace identifier
+ INTEGER(HID_T) :: memspace ! memspace identifier
+ INTEGER(HID_T) :: cparms !dataset creation property list identifier
+
+ !
+ !dataset dimensions at creation time
+ !
+ INTEGER(HSIZE_T), DIMENSION(2) :: dims = (/3,3/)
+
+ !
+ !data1 dimensions
+ !
+ INTEGER(HSIZE_T), DIMENSION(2) :: dims1 = (/3,3/)
+
+ !
+ !data2 dimensions
+ !
+ INTEGER(HSIZE_T), DIMENSION(2) :: dims2 = (/7,1/)
+
+ !
+ !Maximum dimensions
+ !
+ INTEGER(HSIZE_T), DIMENSION(2) :: maxdims
+
+ !
+ !data1 dimensions
+ !
+ INTEGER, DIMENSION(3,3) :: data1
+
+ !
+ !data2 dimensions
+ !
+ INTEGER, DIMENSION(7,1) :: data2
+
+ !
+ !Size of the hyperslab in the file
+ !
+ INTEGER(HSIZE_T), DIMENSION(2) :: size
+
+ !
+ !hyperslab offset in the file
+ !
+ INTEGER(HSIZE_T), DIMENSION(2) :: offset
+
+ !
+ !general purpose integer
+ !
+ INTEGER :: i, j, k
+
+ !
+ !flag to check operation success
+ !
+ INTEGER :: error, error_n
+
+ !
+ !Variables used in reading data back
+ !
+ INTEGER(HSIZE_T), DIMENSION(2) :: chunk_dims = (/5,2/)
+ INTEGER(HSIZE_T), DIMENSION(2) :: chunk_dimsr
+ INTEGER(HSIZE_T), DIMENSION(2) :: dimsr, maxdimsr
+ INTEGER, DIMENSION(10,3) :: data_out
+ INTEGER :: rankr, rank_chunk
+
+ !
+ !data initialization
+ !
+ do i = 1, 3
+ do j = 1, 3
+ data1(i,j) = 1
+ end do
+ end do
+
+ do j = 1, 7
+ data2(j,1) = 2
+ end do
+
+
+ !
+ !Initialize FORTRAN predefined datatypes
+ !
+ CALL h5open_f(error)
+
+ !
+ !Create a new file using default properties.
+ !
+ CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error)
+
+
+ !
+ !Create the data space with unlimited dimensions.
+ !
+ maxdims = (/H5S_UNLIMITED_f, H5S_UNLIMITED_f/)
+
+ CALL h5screate_simple_f(RANK, dims, dataspace, error, maxdims)
+
+ !
+ !Modify dataset creation properties, i.e. enable chunking
+ !
+ CALL h5pcreate_f(H5P_DATASET_CREATE_F, cparms, error)
+
+ CALL h5pset_chunk_f(cparms, RANK, chunk_dims, error)
+
+ !
+ !Create a new dataset within the file using cparms creation properties.
+ !
+ !CALL h5dcreate_f(file_id, dsetname, H5T_NATIVE_INT_F, dataspace, &
+ CALL h5dcreate_f(file_id, dsetname, H5T_NATIVE_INTEGER, dataspace, &
+ dset_id, error, cparms)
+
+ !
+ !Extend the dataset. This call ensures that the dataset is 3 x 3.
+ !
+ size(1) = 3
+ size(2) = 3
+ CALL h5dextend_f(dset_id, size, error)
+
+
+ !
+ !Select a hyperslab.
+ !
+ CALL h5dget_space_f(dset_id, filespace, error)
+ offset(1) = 0;
+ offset(2) = 0;
+ CALL h5sselect_hyperslab_f(filespace, H5S_SELECT_SET_F, &
+ offset, dims1, error)
+
+ !
+ !Write the data to the hyperslab.
+ !
+ !CALL H5Dwrite_f(dset_id, H5T_NATIVE_INT_F, data1, error, &
+ CALL H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER, data1, error, &
+ filespace, dataspace)
+
+ !
+ !Extend the dataset. Dataset becomes 10 x 3.
+ !
+ dims(1) = dims1(1) + dims2(1);
+ size(1) = dims(1);
+ size(2) = dims(2);
+ CALL h5dextend_f(dset_id, size, error)
+
+ !
+ !Select a hyperslab.
+ !
+ CALL h5dget_space_f(dset_id, filespace, error)
+ offset(1) = 3;
+ offset(2) = 0;
+ CALL h5sselect_hyperslab_f(filespace, H5S_SELECT_SET_F, &
+ offset, dims2, error)
+
+ !
+ !create memory dataspace.
+ !
+ CALL h5screate_simple_f(RANK, dims2, memspace, error)
+
+ !
+ !Write the data to the hyperslab.
+ !
+ !CALL H5Dwrite_f(dset_id, H5T_NATIVE_INT_F, data2, error, &
+ CALL H5Dwrite_f(dset_id, H5T_NATIVE_INTEGER, data2, error, &
+ mem_space_id=memspace, file_space_id=filespace)
+
+ !
+ !Close the dataspace for the dataset.
+ !
+ CALL h5sclose_f(dataspace, error)
+ CALL h5sclose_f(filespace, error)
+
+ !
+ !Close the memoryspace.
+ !
+ CALL h5sclose_f(memspace, error)
+
+ !
+ !Close the dataset.
+ !
+ CALL h5dclose_f(dset_id, error)
+
+ !
+ !Close the property list.
+ !
+ CALL h5pclose_f(cparms, error)
+
+ !
+ !Close the file.
+ !
+ CALL h5fclose_f(file_id, error)
+
+ !
+ !read the data back
+ !
+ !Open the file.
+ !
+ CALL h5fopen_f (filename, H5F_ACC_RDONLY_F, file_id, error)
+
+ !
+ !Open the dataset.
+ !
+ CALL h5dopen_f(file_id, dsetname, dset_id, error)
+
+ !
+ !Get dataset's dataspace handle.
+ !
+ CALL h5dget_space_f(dset_id, dataspace, error)
+
+ !
+ !Get dataspace's rank.
+ !
+ CALL h5sget_simple_extent_ndims_f(dataspace, rankr, error)
+
+
+ !
+ !Get dataspace's dimensions.
+ !
+ CALL h5sget_simple_extent_dims_f(dataspace, dimsr, maxdimsr, error)
+
+
+ !
+ !Get creation property list.
+ !
+ CALL h5dget_create_plist_f(dset_id, cparms, error)
+
+ !
+ !Get chunk dimensions.
+ !
+ CALL h5pget_chunk_f(cparms, 2, chunk_dimsr, error)
+
+ !
+ !create memory dataspace.
+ !
+ CALL h5screate_simple_f(rankr, dimsr, memspace, error)
+
+ !
+ !Read data
+ !
+ !CALL H5Dread_f(dset_id, H5T_NATIVE_INT_F, data_out, error, &
+ CALL H5Dread_f(dset_id, H5T_NATIVE_INTEGER, data_out, error, &
+ memspace, dataspace)
+
+ !
+ !Print data
+ !
+ do i = 1, dimsr(1)
+ print *, (data_out(i,j), j = 1,dimsr(2))
+ end do
+
+ !
+ !Close the dataspace for the dataset.
+ !
+ CALL h5sclose_f(dataspace, error)
+
+ !
+ !Close the memoryspace.
+ !
+ CALL h5sclose_f(memspace, error)
+
+ !
+ !Close the dataset.
+ !
+ CALL h5dclose_f(dset_id, error)
+
+ !
+ !Close the file.
+ !
+ CALL h5fclose_f(file_id, error)
+
+ !
+ !Close the property list.
+ !
+ CALL h5pclose_f(cparms, error)
+
+ !
+ ! Close FORTRAN predefined datatypes.
+ !
+ CALL h5close_f(error)
+
+ END PROGRAM CHUNKEXAMPLE
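A minimal sketch of a sanity check that could follow the read-back section of CHUNKEXAMPLE above, using only variables already declared there: the rank, extent, and chunk size returned by h5sget_simple_extent_ndims_f, h5sget_simple_extent_dims_f, and h5pget_chunk_f should match what the program wrote.

    ! Sketch: verify the values read back from the extended dataset.
    IF (rankr /= RANK) PRINT *, 'unexpected rank: ', rankr
    IF (dimsr(1) /= 10 .OR. dimsr(2) /= 3) &
         PRINT *, 'unexpected extent: ', dimsr
    IF (chunk_dimsr(1) /= chunk_dims(1) .OR. chunk_dimsr(2) /= chunk_dims(2)) &
         PRINT *, 'unexpected chunk size: ', chunk_dimsr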
diff --git a/doc/html/Tutor/examples/compound.f90 b/doc/html/Tutor/examples/compound.f90
new file mode 100644
index 0000000..a2bd6b0
--- /dev/null
+++ b/doc/html/Tutor/examples/compound.f90
@@ -0,0 +1,215 @@
+!
+! This program creates a dataset that is a one-dimensional array of
+! structures {
+! character*2
+! integer
+! double precision
+! real
+! }
+! Data is written and read back by fields.
+!
+
+ PROGRAM COMPOUNDEXAMPLE
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+
+ CHARACTER(LEN=11), PARAMETER :: filename = "compound.h5" ! File name
+ CHARACTER(LEN=8), PARAMETER :: dsetname = "Compound" ! Dataset name
+ INTEGER, PARAMETER :: dimsize = 6 ! Size of the dataset
+
+ INTEGER(HID_T) :: file_id ! File identifier
+ INTEGER(HID_T) :: dset_id ! Dataset identifier
+ INTEGER(HID_T) :: dspace_id ! Dataspace identifier
+ INTEGER(HID_T) :: dtype_id ! Compound datatype identifier
+ INTEGER(HID_T) :: dt1_id ! Memory datatype identifier (for character field)
+ INTEGER(HID_T) :: dt2_id ! Memory datatype identifier (for integer field)
+ INTEGER(HID_T) :: dt3_id ! Memory datatype identifier (for double precision field)
+ INTEGER(HID_T) :: dt4_id ! Memory datatype identifier (for real field)
+ INTEGER(HID_T) :: dt5_id ! Memory datatype identifier
+ INTEGER(HID_T) :: plist_id ! Dataset transfer property
+ INTEGER(SIZE_T) :: typesize
+
+
+ INTEGER(HSIZE_T), DIMENSION(1) :: dims = (/dimsize/) ! Dataset dimensions
+ INTEGER :: rank = 1 ! Dataset rank
+
+ INTEGER :: error ! Error flag
+ INTEGER(SIZE_T) :: type_size ! Size of the datatype
+ INTEGER(SIZE_T) :: type_sizec ! Size of the character datatype
+ INTEGER(SIZE_T) :: type_sizei ! Size of the integer datatype
+ INTEGER(SIZE_T) :: type_sized ! Size of the double precision datatype
+ INTEGER(SIZE_T) :: type_sizer ! Size of the real datatype
+ INTEGER(SIZE_T) :: offset ! Member's offset
+ CHARACTER*2, DIMENSION(dimsize) :: char_member
+ CHARACTER*2, DIMENSION(dimsize) :: char_member_out ! Buffer to read data out
+ INTEGER, DIMENSION(dimsize) :: int_member
+ DOUBLE PRECISION, DIMENSION(dimsize) :: double_member
+ REAL, DIMENSION(dimsize) :: real_member
+ INTEGER :: i
+ !
+ ! Initialize data buffer.
+ !
+ do i = 1, dimsize
+ char_member(i)(1:1) = char(65+i)
+ char_member(i)(2:2) = char(65+i)
+ char_member_out(i)(1:1) = char(65)
+ char_member_out(i)(2:2) = char(65)
+ int_member(i) = i
+ double_member(i) = 2.* i
+ real_member(i) = 3. * i
+ enddo
+
+ !
+ ! Initialize FORTRAN interface.
+ !
+ CALL h5open_f(error)
+ !
+ ! Set dataset transfer property to preserve partially initialized fields
+ ! during write/read to/from dataset with compound datatype.
+ !
+ CALL h5pcreate_f(H5P_DATASET_XFER_F, plist_id, error)
+ CALL h5pset_preserve_f(plist_id, 1, error)
+
+ !
+ ! Create a new file using default properties.
+ !
+ CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error)
+
+ !
+ ! Create the dataspace.
+ !
+ CALL h5screate_simple_f(rank, dims, dspace_id, error)
+ !
+ ! Create compound datatype.
+ !
+ ! First calculate total size by calculating sizes of each member
+ !
+ CALL h5tcopy_f(H5T_NATIVE_CHARACTER, dt5_id, error)
+ typesize = 2
+ CALL h5tset_size_f(dt5_id, typesize, error)
+ CALL h5tget_size_f(dt5_id, type_sizec, error)
+ CALL h5tget_size_f(H5T_NATIVE_INTEGER, type_sizei, error)
+ CALL h5tget_size_f(H5T_NATIVE_DOUBLE, type_sized, error)
+ CALL h5tget_size_f(H5T_NATIVE_REAL, type_sizer, error)
+ type_size = type_sizec + type_sizei + type_sized + type_sizer
+ CALL h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, error)
+ !
+ ! Insert members
+ !
+ ! CHARACTER*2 member
+ !
+ offset = 0
+ CALL h5tinsert_f(dtype_id, "char_field", offset, dt5_id, error)
+ !
+ ! INTEGER member
+ !
+ offset = offset + type_sizec ! Offset of the second member is 2
+ CALL h5tinsert_f(dtype_id, "integer_field", offset, H5T_NATIVE_INTEGER, error)
+ !
+ ! DOUBLE PRECISION member
+ !
+ offset = offset + type_sizei ! Offset of the third member is 6
+ CALL h5tinsert_f(dtype_id, "double_field", offset, H5T_NATIVE_DOUBLE, error)
+ !
+ ! REAL member
+ !
+ offset = offset + type_sized ! Offset of the last member is 14
+ CALL h5tinsert_f(dtype_id, "real_field", offset, H5T_NATIVE_REAL, error)
+
+ !
+ ! Create the dataset with compound datatype.
+ !
+ CALL h5dcreate_f(file_id, dsetname, dtype_id, dspace_id, &
+ dset_id, error)
+ !
+ ! Create memory types. We have to create a compound datatype
+ ! for each member we want to write.
+ !
+ CALL h5tcreate_f(H5T_COMPOUND_F, type_sizec, dt1_id, error)
+ offset = 0
+ CALL h5tinsert_f(dt1_id, "char_field", offset, dt5_id, error)
+ !
+ CALL h5tcreate_f(H5T_COMPOUND_F, type_sizei, dt2_id, error)
+ offset = 0
+ CALL h5tinsert_f(dt2_id, "integer_field", offset, H5T_NATIVE_INTEGER, error)
+ !
+ CALL h5tcreate_f(H5T_COMPOUND_F, type_sized, dt3_id, error)
+ offset = 0
+ CALL h5tinsert_f(dt3_id, "double_field", offset, H5T_NATIVE_DOUBLE, error)
+ !
+ CALL h5tcreate_f(H5T_COMPOUND_F, type_sizer, dt4_id, error)
+ offset = 0
+ CALL h5tinsert_f(dt4_id, "real_field", offset, H5T_NATIVE_REAL, error)
+ !
+ ! Write data by fields in the datatype. Fields order is not important.
+ !
+ CALL h5dwrite_f(dset_id, dt4_id, real_member, error, xfer_prp = plist_id)
+ CALL h5dwrite_f(dset_id, dt1_id, char_member, error, xfer_prp = plist_id)
+ CALL h5dwrite_f(dset_id, dt3_id, double_member, error, xfer_prp = plist_id)
+ CALL h5dwrite_f(dset_id, dt2_id, int_member, error, xfer_prp = plist_id)
+
+ !
+ ! End access to the dataset and release resources used by it.
+ !
+ CALL h5dclose_f(dset_id, error)
+
+ !
+ ! Terminate access to the data space.
+ !
+ CALL h5sclose_f(dspace_id, error)
+ !
+ ! Terminate access to the datatype
+ !
+ CALL h5tclose_f(dtype_id, error)
+ CALL h5tclose_f(dt1_id, error)
+ CALL h5tclose_f(dt2_id, error)
+ CALL h5tclose_f(dt3_id, error)
+ CALL h5tclose_f(dt4_id, error)
+ CALL h5tclose_f(dt5_id, error)
+
+ !
+ ! Close the file.
+ !
+ CALL h5fclose_f(file_id, error)
+
+ !
+ ! Open the file.
+ !
+ CALL h5fopen_f (filename, H5F_ACC_RDWR_F, file_id, error)
+ !
+ ! Open the dataset.
+ !
+ CALL h5dopen_f(file_id, dsetname, dset_id, error)
+ !
+ ! Create memory datatype to read the character member of the compound datatype.
+ !
+ CALL h5tcopy_f(H5T_NATIVE_CHARACTER, dt2_id, error)
+ typesize = 2
+ CALL h5tset_size_f(dt2_id, typesize, error)
+ CALL h5tget_size_f(dt2_id, type_size, error)
+ CALL h5tcreate_f(H5T_COMPOUND_F, type_size, dt1_id, error)
+ offset = 0
+ CALL h5tinsert_f(dt1_id, "char_field", offset, dt2_id, error)
+ !
+ ! Read part of the dataset and display it.
+ !
+ CALL h5dread_f(dset_id, dt1_id, char_member_out, error)
+ write(*,*) (char_member_out(i), i=1, dimsize)
+
+ !
+ ! Close all open objects.
+ !
+ CALL h5dclose_f(dset_id, error)
+ CALL h5tclose_f(dt1_id, error)
+ CALL h5tclose_f(dt2_id, error)
+ CALL h5fclose_f(file_id, error)
+ !
+ ! Close FORTRAN interface.
+ !
+ CALL h5close_f(error)
+
+ END PROGRAM COMPOUNDEXAMPLE
+
+
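The integer field of the compound dataset can be read back the same way as the character field above. A minimal sketch, reusing the declarations from COMPOUNDEXAMPLE and meant to sit before the final h5dclose_f of the read-back section (dt3_id is reused here purely as a scratch datatype identifier):

    ! Sketch: read the integer field by its name in the file.
    CALL h5tcreate_f(H5T_COMPOUND_F, type_sizei, dt3_id, error)
    offset = 0
    CALL h5tinsert_f(dt3_id, "integer_field", offset, H5T_NATIVE_INTEGER, error)
    CALL h5dread_f(dset_id, dt3_id, int_member, error)
    write(*,*) (int_member(i), i=1, dimsize)
    CALL h5tclose_f(dt3_id, error)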
diff --git a/doc/html/Tutor/examples/dsetexample.f90 b/doc/html/Tutor/examples/dsetexample.f90
new file mode 100644
index 0000000..9b69a3f
--- /dev/null
+++ b/doc/html/Tutor/examples/dsetexample.f90
@@ -0,0 +1,70 @@
+!
+! The following example shows how to create an empty dataset.
+! It creates a file called 'dsetf.h5', defines the
+! dataset dataspace, creates a dataset which is a 4x6 integer array,
+! and then closes the dataspace, the dataset, and the file.
+!
+
+ PROGRAM DSETEXAMPLE
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+
+ CHARACTER(LEN=8), PARAMETER :: filename = "dsetf.h5" ! File name
+ CHARACTER(LEN=4), PARAMETER :: dsetname = "dset" ! Dataset name
+
+ INTEGER(HID_T) :: file_id ! File identifier
+ INTEGER(HID_T) :: dset_id ! Dataset identifier
+ INTEGER(HID_T) :: dspace_id ! Dataspace identifier
+
+
+ INTEGER(HSIZE_T), DIMENSION(2) :: dims = (/4,6/) ! Dataset dimensions
+ INTEGER :: rank = 2 ! Dataset rank
+
+ INTEGER :: error ! Error flag
+
+ !
+ ! Initialize FORTRAN predefined datatypes.
+ !
+ CALL h5open_f(error)
+
+ !
+ ! Create a new file using default properties.
+ !
+ CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error)
+
+ !
+ ! Create the dataspace.
+ !
+ CALL h5screate_simple_f(rank, dims, dspace_id, error)
+
+ !
+ ! Create the dataset with default properties.
+ !
+ CALL h5dcreate_f(file_id, dsetname, H5T_NATIVE_INTEGER, dspace_id, &
+ dset_id, error)
+
+ !
+ ! End access to the dataset and release resources used by it.
+ !
+ CALL h5dclose_f(dset_id, error)
+
+ !
+ ! Terminate access to the data space.
+ !
+ CALL h5sclose_f(dspace_id, error)
+
+ !
+ ! Close the file.
+ !
+ CALL h5fclose_f(file_id, error)
+
+ !
+ ! Close FORTRAN predefined datatypes.
+ !
+ CALL h5close_f(error)
+
+ END PROGRAM DSETEXAMPLE
+
+
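DSETEXAMPLE above only creates an empty dataset. A minimal sketch of filling it before the h5dclose_f call, using the same h5dwrite_f pattern as grpdsetexample.f90 later in this commit; dset_data, i, and j are extra declarations not present in the program:

    ! Sketch: requires  INTEGER, DIMENSION(4,6) :: dset_data  and  INTEGER :: i, j
    do i = 1, 4
       do j = 1, 6
          dset_data(i,j) = (i-1)*6 + j
       end do
    end do
    CALL h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, dset_data, error)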
diff --git a/doc/html/Tutor/examples/fileexample.f90 b/doc/html/Tutor/examples/fileexample.f90
new file mode 100644
index 0000000..e11dcaa
--- /dev/null
+++ b/doc/html/Tutor/examples/fileexample.f90
@@ -0,0 +1,34 @@
+!
+! The following example demonstrates how to create and close an HDF5 file.
+! It creates a file called 'filef.h5', and then closes the file.
+!
+
+ PROGRAM FILEEXAMPLE
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+
+ CHARACTER(LEN=8), PARAMETER :: filename = "filef.h5" ! File name
+ INTEGER(HID_T) :: file_id ! File identifier
+
+ INTEGER :: error ! Error flag
+
+!
+! Initialize FORTRAN interface.
+!
+ CALL h5open_f (error)
+ !
+ ! Create a new file using default properties.
+ !
+ CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error)
+
+ !
+ ! Terminate access to the file.
+ !
+ CALL h5fclose_f(file_id, error)
+!
+! Close FORTRAN interface.
+!
+ CALL h5close_f(error)
+ END PROGRAM FILEEXAMPLE
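Each call above returns its status in the error argument. A minimal sketch of the kind of check a real program would add after, for example, h5fcreate_f (the HDF5 Fortran routines return a negative error code on failure):

    CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error)
    if (error /= 0) then
       print *, 'h5fcreate_f failed with status ', error
       stop
    end if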
diff --git a/doc/html/Tutor/examples/groupexample.f90 b/doc/html/Tutor/examples/groupexample.f90
new file mode 100644
index 0000000..d98d7cd
--- /dev/null
+++ b/doc/html/Tutor/examples/groupexample.f90
@@ -0,0 +1,49 @@
+!
+! The following example shows how to create and close a group.
+! It creates a file called 'groupf.h5', creates a group
+! called MyGroup in the root group, and then closes the group and file.
+!
+
+
+ PROGRAM GROUPEXAMPLE
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+
+ CHARACTER(LEN=9), PARAMETER :: filename = "groupf.h5" ! File name
+ CHARACTER(LEN=7), PARAMETER :: groupname = "MyGroup" ! Group name
+
+ INTEGER(HID_T) :: file_id ! File identifier
+ INTEGER(HID_T) :: group_id ! Group identifier
+
+ INTEGER :: error ! Error flag
+!
+! Initialize FORTRAN interface.
+!
+ CALL h5open_f(error)
+ !
+ ! Create a new file using default properties.
+ !
+ CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error)
+
+ !
+ ! Create a group named "/MyGroup" in the file.
+ !
+ CALL h5gcreate_f(file_id, groupname, group_id, error)
+
+ !
+ ! Close the group.
+ !
+ CALL h5gclose_f(group_id, error)
+
+ !
+ ! Terminate access to the file.
+ !
+ CALL h5fclose_f(file_id, error)
+!
+! Close FORTRAN interface.
+!
+ CALL h5close_f(error)
+
+ END PROGRAM GROUPEXAMPLE
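The group created above can be reopened later by name. A minimal sketch, reusing the declarations from GROUPEXAMPLE and the same h5fopen_f/h5gopen_f calls that appear in grpdsetexample.f90 below:

    ! Sketch: reopen the file and the "MyGroup" group.
    CALL h5fopen_f(filename, H5F_ACC_RDWR_F, file_id, error)
    CALL h5gopen_f(file_id, groupname, group_id, error)
    CALL h5gclose_f(group_id, error)
    CALL h5fclose_f(file_id, error)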
diff --git a/doc/html/Tutor/examples/grpdsetexample.f90 b/doc/html/Tutor/examples/grpdsetexample.f90
new file mode 100644
index 0000000..ceb2fe9
--- /dev/null
+++ b/doc/html/Tutor/examples/grpdsetexample.f90
@@ -0,0 +1,136 @@
+!
+! This example shows how to create a dataset in a particular group.
+! It opens the file created in the previous example and creates two datasets.
+! Absolute and relative dataset names are used.
+!
+
+
+ PROGRAM GRPDSETEXAMPLE
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+
+ CHARACTER(LEN=10), PARAMETER :: filename = "groupsf.h5" ! File name
+ CHARACTER(LEN=15), PARAMETER :: groupname = "MyGroup/Group_A" ! Group name
+ CHARACTER(LEN=13), PARAMETER :: dsetname1 = "MyGroup/dset1" ! Dataset name
+ CHARACTER(LEN=5), PARAMETER :: dsetname2 = "dset2" ! dataset name
+
+ INTEGER(HID_T) :: file_id ! File identifier
+ INTEGER(HID_T) :: group_id ! Group identifier
+ INTEGER(HID_T) :: dataset_id ! Dataset identifier
+ INTEGER(HID_T) :: dataspace_id ! Data space identifier
+
+ INTEGER :: i, j
+ INTEGER :: error ! Error flag
+
+ INTEGER, DIMENSION(3,3) :: dset1_data ! Data arrays
+ INTEGER, DIMENSION(2,10) :: dset2_data !
+
+ INTEGER(HSIZE_T), DIMENSION(2) :: dims1 = (/3,3/) ! Datasets dimensions
+ INTEGER(HSIZE_T), DIMENSION(2) :: dims2 = (/2,10/)!
+
+ INTEGER :: rank = 2 ! Datasets rank
+
+ !
+ !Initialize dset1_data array
+ !
+ do i = 1, 3
+ do j = 1, 3
+ dset1_data(i,j) = j;
+ end do
+ end do
+
+
+ !
+ !Initialize dset2_data array
+ !
+ do i = 1, 2
+ do j = 1, 10
+ dset2_data(i,j) = j;
+ end do
+ end do
+
+ !
+ ! Initialize FORTRAN predefined datatypes.
+ !
+ CALL h5open_f(error)
+
+ !
+ ! Open an existing file.
+ !
+ CALL h5fopen_f (filename, H5F_ACC_RDWR_F, file_id, error)
+
+ !
+ ! Create the data space for the first dataset.
+ !
+ CALL h5screate_simple_f(rank, dims1, dataspace_id, error)
+
+ !
+ ! Create a dataset in group "MyGroup" with default properties.
+ !
+ CALL h5dcreate_f(file_id, dsetname1, H5T_NATIVE_INTEGER, dataspace_id, &
+ dataset_id, error)
+
+ !
+ ! Write the first dataset.
+ !
+ CALL h5dwrite_f(dataset_id, H5T_NATIVE_INTEGER, dset1_data, error)
+
+ !
+ ! Close the dataspace for the first dataset.
+ !
+ CALL h5sclose_f(dataspace_id, error)
+
+ !
+ ! Close the first dataset.
+ !
+ CALL h5dclose_f(dataset_id, error)
+
+ !
+ ! Open an existing group in the specified file.
+ !
+ CALL h5gopen_f(file_id, groupname, group_id, error)
+
+ !
+ !Create the data space for the second dataset.
+ !
+ CALL h5screate_simple_f(rank, dims2, dataspace_id, error)
+
+ !
+ ! Create the second dataset in group "Group_A" with default properties.
+ !
+ CALL h5dcreate_f(group_id, dsetname2, H5T_NATIVE_INTEGER, dataspace_id, &
+ dataset_id, error)
+
+ !
+ ! Write the second dataset.
+ !
+ CALL h5dwrite_f(dataset_id, H5T_NATIVE_INTEGER, dset2_data, error)
+
+ !
+ ! Close the dataspace for the second dataset.
+ !
+ CALL h5sclose_f(dataspace_id, error)
+
+ !
+ ! Close the second dataset.
+ !
+ CALL h5dclose_f(dataset_id, error)
+
+ !
+ ! Close the group.
+ !
+ CALL h5gclose_f(group_id, error)
+
+ !
+ ! Close the file.
+ !
+ CALL h5fclose_f(file_id, error)
+
+ !
+ ! Close FORTRAN predefined datatypes.
+ !
+ CALL h5close_f(error)
+
+ END PROGRAM GRPDSETEXAMPLE
diff --git a/doc/html/Tutor/examples/grpit.f90 b/doc/html/Tutor/examples/grpit.f90
new file mode 100644
index 0000000..3aff2ad
--- /dev/null
+++ b/doc/html/Tutor/examples/grpit.f90
@@ -0,0 +1,194 @@
+!
+! In this example we iterate through the members of the groups.
+!
+
+
+ PROGRAM GRPITEXAMPLE
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+
+ CHARACTER(LEN=11), PARAMETER :: filename = "iteratef.h5" ! File name
+ CHARACTER(LEN=7), PARAMETER :: groupname1 = "MyGroup" ! Group name
+ CHARACTER(LEN=7), PARAMETER :: groupname2 = "Group_A" ! Group name
+ CHARACTER(LEN=5), PARAMETER :: dsetname1 = "dset1" ! Dataset name
+ CHARACTER(LEN=5), PARAMETER :: dsetname2 = "dset2" !
+
+ CHARACTER(LEN=20) :: name_buffer ! Buffer to hold object's name
+ INTEGER :: type ! Type of the object
+ INTEGER :: nmembers ! Number of group members
+
+ INTEGER(HID_T) :: file_id ! File identifier
+ INTEGER(HID_T) :: dataset1_id ! Dataset1 identifier
+ INTEGER(HID_T) :: dataset2_id ! Dataset2 identifier
+ INTEGER(HID_T) :: dataspace1_id ! Data space identifier
+ INTEGER(HID_T) :: dataspace2_id ! Data space identifier
+ INTEGER(HID_T) :: group1_id, group2_id ! Group identifiers
+
+ INTEGER :: i, j
+
+ INTEGER :: error ! Error flag
+
+ INTEGER, DIMENSION(3,3) :: dset1_data ! Arrays to hold data
+ INTEGER, DIMENSION(2,10) :: dset2_data !
+
+ INTEGER(HSIZE_T), DIMENSION(2) :: dims1 = (/3,3/) ! Dataset dimensions
+ INTEGER(HSIZE_T), DIMENSION(2) :: dims2 = (/2,10/)!
+ INTEGER :: rank = 2 ! Datasets rank
+
+ !
+ ! Initialize dset1_data array.
+ !
+ do i = 1, 3
+ do j = 1, 3
+ dset1_data(i,j) = j;
+ end do
+ end do
+
+
+ !
+ ! Initialize dset2_data array.
+ !
+ do i = 1, 2
+ do j = 1, 10
+ dset2_data(i,j) = j;
+ end do
+ end do
+
+ !
+ ! Initialize FORTRAN interface.
+ !
+ CALL h5open_f(error)
+
+ !
+ ! Create a new file using default properties.
+ !
+ CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error)
+
+ !
+ ! Create group "MyGroup" in the root group using absolute name.
+ !
+ CALL h5gcreate_f(file_id, groupname1, group1_id, error)
+
+ !
+ ! Create group "Group_A" in group "MyGroup" using relative name.
+ !
+ CALL h5gcreate_f(group1_id, groupname2, group2_id, error)
+
+ !
+ ! Create the data space for the first dataset.
+ !
+ CALL h5screate_simple_f(rank, dims1, dataspace1_id, error)
+
+ !
+ ! Create a dataset in group "MyGroup" with default properties.
+ !
+ CALL h5dcreate_f(group1_id, dsetname1, H5T_NATIVE_INTEGER, dataspace1_id, &
+ dataset1_id, error)
+
+ !
+ ! Write the first dataset.
+ !
+ CALL h5dwrite_f(dataset1_id, H5T_NATIVE_INTEGER, dset1_data, error)
+
+ !
+ ! Create the data space for the second dataset.
+ !
+ CALL h5screate_simple_f(rank, dims2, dataspace2_id, error)
+
+ !
+ ! Create the second dataset in group "Group_A" with default properties
+ !
+ CALL h5dcreate_f(group2_id, dsetname2, H5T_NATIVE_INTEGER, dataspace2_id, &
+ dataset2_id, error)
+
+ !
+ ! Write the second dataset
+ !
+ CALL h5dwrite_f(dataset2_id, H5T_NATIVE_INTEGER, dset2_data, error)
+
+ !
+ ! Get number of members in the root group.
+ !
+ CALL h5gn_members_f(file_id, "/", nmembers, error)
+ write(*,*) "Number of root group member is " , nmembers
+
+ !
+ ! Print each group member's name and type.
+ !
+ do i = 0, nmembers - 1
+ CALL h5gget_obj_info_idx_f(file_id, "/", i, name_buffer, type, &
+ error)
+ write(*,*) name_buffer, type
+ end do
+
+ !
+ ! Get number of members in MyGroup.
+ !
+ CALL h5gn_members_f(file_id, "MyGroup", nmembers, error)
+ write(*,*) "Number of group MyGroup member is ", nmembers
+
+ !
+ ! Print each group member's name and type in "MyGroup" group.
+ !
+ do i = 0, nmembers - 1
+ CALL h5gget_obj_info_idx_f(file_id, groupname1, i, name_buffer, type, &
+ error)
+ write(*,*) name_buffer, type
+ end do
+
+
+ !
+ ! Get number of members in MyGroup/Group_A.
+ !
+ CALL h5gn_members_f(file_id, "MyGroup/Group_A", nmembers, error)
+ write(*,*) "Number of group MyGroup/Group_A member is ", nmembers
+
+ !
+ ! Print each group member's name and type in "MyGroup/Group_A" group.
+ !
+ do i = 0, nmembers - 1
+ CALL h5gget_obj_info_idx_f(file_id,"MyGroup/Group_A" , i, name_buffer, type, &
+ error)
+ write(*,*) name_buffer, type
+ end do
+
+ !
+ ! Close the dataspace for the first dataset.
+ !
+ CALL h5sclose_f(dataspace1_id, error)
+
+ !
+ ! Close the first dataset.
+ !
+ CALL h5dclose_f(dataset1_id, error)
+
+ !
+ ! Close the dataspace for the second dataset.
+ !
+ CALL h5sclose_f(dataspace2_id, error)
+
+ !
+ ! Close the second dataset.
+ !
+ CALL h5dclose_f(dataset2_id, error)
+
+ !
+ ! Close the groups.
+ !
+ CALL h5gclose_f(group1_id, error)
+
+ CALL h5gclose_f(group2_id, error)
+
+ !
+ ! Close the file.
+ !
+ CALL h5fclose_f(file_id, error)
+
+ !
+ ! Close FORTRAN interface.
+ !
+ CALL h5close_f(error)
+
+ END PROGRAM GRPITEXAMPLE
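The type value printed above is an integer code. A minimal sketch of decoding it, assuming the library's Fortran object-type constants are named H5G_GROUP_F, H5G_DATASET_F, and H5G_LINK_F (names not used elsewhere in these examples):

    ! Sketch: interpret the code returned by h5gget_obj_info_idx_f.
    if (type == H5G_GROUP_F) then
       write(*,*) name_buffer, ' is a group'
    else if (type == H5G_DATASET_F) then
       write(*,*) name_buffer, ' is a dataset'
    else if (type == H5G_LINK_F) then
       write(*,*) name_buffer, ' is a link'
    end if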
diff --git a/doc/html/Tutor/examples/grpsexample.f90 b/doc/html/Tutor/examples/grpsexample.f90
new file mode 100644
index 0000000..4b53bf0
--- /dev/null
+++ b/doc/html/Tutor/examples/grpsexample.f90
@@ -0,0 +1,68 @@
+!
+! The following example code shows how to create groups
+! using absolute and relative names. It creates three groups:
+! the first two groups are created using the file identifier and
+! the group absolute names, and the third group is created using
+! a group identifier and the name relative to the specified group.
+!
+
+
+ PROGRAM GRPSEXAMPLE
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+
+ CHARACTER(LEN=10), PARAMETER :: filename = "groupsf.h5" ! File name
+ CHARACTER(LEN=8), PARAMETER :: groupname1 = "/MyGroup" ! Group name
+ CHARACTER(LEN=16), PARAMETER :: groupname2 = "/MyGroup/Group_A"
+ ! Group name
+ CHARACTER(LEN=7), PARAMETER :: groupname3 = "Group_B" ! Group name
+
+ INTEGER(HID_T) :: file_id ! File identifier
+ INTEGER(HID_T) :: group1_id, group2_id, group3_id ! Group identifiers
+
+ INTEGER :: error ! Error flag
+ !
+ ! Initialize FORTRAN interface.
+ !
+ CALL h5open_f(error)
+
+ !
+ ! Create a new file using default properties.
+ !
+ CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error)
+
+ !
+ ! Create group "MyGroup" in the root group using absolute name.
+ !
+ CALL h5gcreate_f(file_id, groupname1, group1_id, error)
+
+ !
+ ! Create group "Group_A" in group "MyGroup" using absolute name.
+ !
+ CALL h5gcreate_f(file_id, groupname2, group2_id, error)
+
+ !
+ ! Create group "Group_B" in group "MyGroup" using relative name.
+ !
+ CALL h5gcreate_f(group1_id, groupname3, group3_id, error)
+
+ !
+ ! Close the groups.
+ !
+ CALL h5gclose_f(group1_id, error)
+ CALL h5gclose_f(group2_id, error)
+ CALL h5gclose_f(group3_id, error)
+
+ !
+ ! Terminate access to the file.
+ !
+ CALL h5fclose_f(file_id, error)
+
+ !
+ ! Close FORTRAN interface.
+ !
+ CALL h5close_f(error)
+
+ END PROGRAM GRPSEXAMPLE
diff --git a/doc/html/Tutor/examples/hyperslab.f90 b/doc/html/Tutor/examples/hyperslab.f90
new file mode 100644
index 0000000..e49f18b
--- /dev/null
+++ b/doc/html/Tutor/examples/hyperslab.f90
@@ -0,0 +1,199 @@
+!
+! This example shows how to write and read a hyperslab.
+!
+
+ PROGRAM SELECTEXAMPLE
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+
+ CHARACTER(LEN=7), PARAMETER :: filename = "sdsf.h5" ! File name
+ CHARACTER(LEN=8), PARAMETER :: dsetname = "IntArray" ! Dataset name
+
+ INTEGER(HID_T) :: file_id ! File identifier
+ INTEGER(HID_T) :: dset_id ! Dataset identifier
+ INTEGER(HID_T) :: dataspace ! Dataspace identifier
+ INTEGER(HID_T) :: memspace ! memspace identifier
+
+ INTEGER(HSIZE_T), DIMENSION(3) :: dimsm = (/7,7,3/) ! Dataset dimensions
+ ! in memory
+ INTEGER(HSIZE_T), DIMENSION(2) :: dims_out ! Buffer to read in dataset
+ ! dimensions
+ INTEGER(HSIZE_T), DIMENSION(2) :: dimsf = (/5,6/) ! Dataset dimensions.
+
+ INTEGER(HSIZE_T), DIMENSION(2) :: count = (/3,4/)
+ ! Size of the hyperslab in the file
+ INTEGER(HSIZE_T), DIMENSION(2) :: offset = (/1,2/)
+ !hyperslab offset in the file
+ INTEGER(HSIZE_T), DIMENSION(3) :: count_out = (/3,4,1/)
+ !Size of the hyperslab in memory
+ INTEGER(HSIZE_T), DIMENSION(3) :: offset_out = (/3,0,0/)
+ !hyperslab offset in memory
+ INTEGER, DIMENSION(5,6) :: data ! Data to write
+ INTEGER, DIMENSION(7,7,3) :: data_out ! Output buffer
+ INTEGER :: dsetrank = 2 ! Dataset rank ( in file )
+ INTEGER :: memrank = 3 ! Dataset rank ( in memory )
+ INTEGER :: rank
+ INTEGER :: i, j, k
+
+ INTEGER :: error, error_n ! Error flags
+
+
+ !
+ ! Write data to the HDF5 file.
+ !
+
+ !
+ ! Data initialization.
+ !
+ do i = 1, 5
+ do j = 1, 6
+ data(i,j) = (i-1) + (j-1);
+ end do
+ end do
+ !
+ ! 0, 1, 2, 3, 4, 5
+ ! 1, 2, 3, 4, 5, 6
+ ! 2, 3, 4, 5, 6, 7
+ ! 3, 4, 5, 6, 7, 8
+ ! 4, 5, 6, 7, 8, 9
+ !
+
+ !
+ ! Initialize FORTRAN predefined datatypes
+ !
+ CALL h5open_f(error)
+
+ !
+ ! Create a new file using default properties.
+ !
+ CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error)
+
+ !
+ ! Create the data space for the dataset.
+ !
+ CALL h5screate_simple_f(dsetrank, dimsf, dataspace, error)
+
+ !
+ ! Create the dataset with default properties.
+ !
+ CALL h5dcreate_f(file_id, dsetname, H5T_NATIVE_INTEGER, dataspace, &
+ dset_id, error)
+
+ !
+ ! Write the dataset.
+ !
+ CALL h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, data, error)
+
+ !
+ ! Close the dataspace for the dataset.
+ !
+ CALL h5sclose_f(dataspace, error)
+
+ !
+ ! Close the dataset.
+ !
+ CALL h5dclose_f(dset_id, error)
+
+ !
+ ! Close the file.
+ !
+ CALL h5fclose_f(file_id, error)
+
+ !
+ ! This part of the code reads the hyperslab from the sdsf.h5 file just
+ ! created, into a 2-dimensional plane of the 3-dimensional dataset.
+ !
+
+ !
+ ! Initialize data_out array.
+ !
+ do i = 1, 7
+ do j = 1, 7
+ do k = 1,3
+ data_out(i,j,k) = 0;
+ end do
+ end do
+ end do
+
+ !
+ ! Open the file.
+ !
+ CALL h5fopen_f (filename, H5F_ACC_RDONLY_F, file_id, error)
+
+ !
+ ! Open the dataset.
+ !
+ CALL h5dopen_f(file_id, dsetname, dset_id, error)
+
+ !
+ ! Get dataset's dataspace identifier.
+ !
+ CALL h5dget_space_f(dset_id, dataspace, error)
+
+ !
+ ! Select hyperslab in the dataset.
+ !
+ CALL h5sselect_hyperslab_f(dataspace, H5S_SELECT_SET_F, &
+ offset, count, error)
+ !
+ ! Create memory dataspace.
+ !
+ CALL h5screate_simple_f(memrank, dimsm, memspace, error)
+
+ !
+ ! Select hyperslab in memory.
+ !
+ CALL h5sselect_hyperslab_f(memspace, H5S_SELECT_SET_F, &
+ offset_out, count_out, error)
+
+ !
+ ! Read data from hyperslab in the file into the hyperslab in
+ ! memory and display.
+ !
+ CALL H5Dread_f(dset_id, H5T_NATIVE_INTEGER, data_out, error, &
+ memspace, dataspace)
+
+ !
+ ! Display data_out array
+ !
+ do i = 1, 7
+ print *, (data_out(i,j,1), j = 1,7)
+ end do
+
+ ! 0 0 0 0 0 0 0
+ ! 0 0 0 0 0 0 0
+ ! 0 0 0 0 0 0 0
+ ! 3 4 5 6 0 0 0
+ ! 4 5 6 7 0 0 0
+ ! 5 6 7 8 0 0 0
+ ! 0 0 0 0 0 0 0
+ !
+
+ !
+ ! Close the dataspace for the dataset.
+ !
+ CALL h5sclose_f(dataspace, error)
+
+ !
+ ! Close the memoryspace.
+ !
+ CALL h5sclose_f(memspace, error)
+
+ !
+ ! Close the dataset.
+ !
+ CALL h5dclose_f(dset_id, error)
+
+ !
+ ! Close the file.
+ !
+ CALL h5fclose_f(file_id, error)
+
+ !
+ ! Close FORTRAN predefined datatypes.
+ !
+ CALL h5close_f(error)
+
+ END PROGRAM SELECTEXAMPLE
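The hyperslab selections above use only an offset and a count. A minimal sketch of a strided selection, assuming h5sselect_hyperslab_f also accepts optional stride (and block) arguments after the error flag; it reuses the file dataspace from SELECTEXAMPLE and needs the extra declaration INTEGER(HSIZE_T), DIMENSION(2) :: stride.

    ! Sketch: select every other row (rows 0, 2, 4) and all 6 columns
    ! of the 5x6 file dataset.
    offset = (/0, 0/)
    count  = (/3, 6/)
    stride = (/2, 1/)
    CALL h5sselect_hyperslab_f(dataspace, H5S_SELECT_SET_F, &
                               offset, count, error, stride)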
diff --git a/doc/html/Tutor/examples/java/Compound.java b/doc/html/Tutor/examples/java/Compound.java
new file mode 100644
index 0000000..219e1c1
--- /dev/null
+++ b/doc/html/Tutor/examples/java/Compound.java
@@ -0,0 +1,540 @@
+/******************************************************************
+ * Compound.java (for HDF5 tutorial lesson 11)
+ *
+ * -- Creating a compound data type
+ * (a java conversion from compound.c)
+ *
+ ******************************************************************/
+
+import ncsa.hdf.hdf5lib.*;
+import ncsa.hdf.hdf5lib.exceptions.*;
+
+public class Compound
+{
+ public static void main (String []argv)
+ {
+ final String FILE = "SDScompound.h5";
+ final String DATASETNAME = "ArrayOfStructures";
+ final int LENGTH = 10;
+ final int RANK = 1;
+
+ /* First structure and dataset */
+ /* an array of LENGTH 'complex' numbers */
+ byte[] data1 = new byte[LENGTH * 16];
+
+ int[] AR = new int[1];
+ float[] BR = new float[1];
+ double[] CR = new double[1];
+
+ byte [] ARec = new byte[4];
+ byte [] BRec = new byte[4];
+ byte [] CRec = new byte[8];
+
+ int s1_tid; /* File datatype identifier */
+
+ /* Second structure (subset of s1_t) and dataset*/
+ byte[] data2 = new byte[LENGTH * 12];
+ int s2_tid; /* Memory datatype handle */
+
+ /* Third "structure" ( will be used to read float field of s1) */
+ int s3_tid; /* Memory datatype handle */
+ float[] s3 = new float[LENGTH];
+
+ int i;
+ int file, dataset, space; /* Handles */
+ int status;
+ long[] dim = new long[1]; /* Dataspace dimensions */
+ dim[0] = LENGTH;
+
+ /*
+ * Initialize the data
+ */
+ for (i = 0; i < LENGTH; i++)
+ {
+ AR[0] = (int) i;
+ BR[0] = (float) i * i;
+ CR[0] = (double) 1. / (i + 1);
+
+ ARec = HDFNativeData.intToByte (0, 1, AR);
+ BRec = HDFNativeData.floatToByte (0, 1, BR);
+ CRec = HDFNativeData.doubleToByte (0, 1, CR);
+
+ System.arraycopy (ARec, 0, data1, (i * 16), 4);
+ System.arraycopy (BRec, 0, data1, (i * 16) + 4, 4);
+ System.arraycopy (CRec, 0, data1, (i * 16) + 8, 8);
+ }
+
+ /*
+ * Create the data space.
+ */
+ space = H5Screate_simple_wrap (RANK, dim, null);
+
+ /*
+ * Create the file.
+ */
+ file = H5Fcreate_wrap (FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ /*
+ * Create the memory data type.
+ */
+ s1_tid = H5Tcreate_wrap (HDF5Constants.H5T_COMPOUND, 16);
+ H5Tinsert_wrap (s1_tid, "a_name", 0,
+ H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT));
+ H5Tinsert_wrap (s1_tid, "b_name", 4,
+ H5.J2C (HDF5CDataTypes.JH5T_NATIVE_FLOAT));
+ H5Tinsert_wrap (s1_tid, "c_name", 8,
+ H5.J2C (HDF5CDataTypes.JH5T_NATIVE_DOUBLE));
+
+ /*
+ * Create the dataset.
+ */
+ dataset = H5Dcreate_wrap (file, DATASETNAME, s1_tid,
+ space, HDF5Constants.H5P_DEFAULT);
+
+ /*
+ * Write data to the dataset.
+ */
+ status = H5Dwrite_wrap (dataset, s1_tid,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, data1);
+
+ /*
+ * Release resources
+ */
+ H5Tclose_wrap (s1_tid);
+ H5Sclose_wrap (space);
+ H5Dclose_wrap (dataset);
+ H5Fclose_wrap (file);
+
+ /*
+ * Open the file and the dataset.
+ */
+ file = H5Fopen_wrap (FILE, HDF5Constants.H5F_ACC_RDONLY,
+ HDF5Constants.H5P_DEFAULT);
+
+ dataset = H5Dopen_wrap (file, DATASETNAME);
+
+ /*
+ * Create a data type for s2
+ */
+ s2_tid = H5Tcreate_wrap (HDF5Constants.H5T_COMPOUND, 12);
+ H5Tinsert_wrap (s2_tid, "c_name", 0,
+ H5.J2C (HDF5CDataTypes.JH5T_NATIVE_DOUBLE));
+ H5Tinsert_wrap (s2_tid, "a_name", 8,
+ H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT));
+
+ /*
+ * Read two fields c and a from s1 dataset. Fields in the file
+ * are found by their names "c_name" and "a_name".
+ */
+ status = H5Dread_wrap (dataset, s2_tid, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, data2);
+
+ /*
+ * Display the fields. Convert from bytes into numbers.
+ */
+ System.out.println ("\nField c : ");
+ for( i = 0; i < LENGTH; i++) {
+ System.arraycopy (data2, (i*12), CRec, 0, 8);
+ CR = HDFNativeData.byteToDouble(0, 1, CRec);
+ System.out.print (CR[0]+" ");
+ }
+ System.out.println ();
+
+ System.out.println("\nField a :");
+ for( i = 0; i < LENGTH; i++) {
+ System.arraycopy (data2, (i*12)+8, ARec, 0, 4);
+ AR = HDFNativeData.byteToInt(0, 1, ARec);
+ System.out.print (AR[0]+" ");
+ }
+ System.out.println ();
+
+ /*
+ * Create a data type for s3.
+ */
+ s3_tid = H5Tcreate_wrap (HDF5Constants.H5T_COMPOUND, 4);
+
+ status =
+ H5Tinsert_wrap (s3_tid, "b_name", 0,
+ H5.J2C (HDF5CDataTypes.JH5T_NATIVE_FLOAT));
+
+ /*
+ * Read field b from s1 dataset. Field in the file is found by its name.
+ */
+ status = H5Dread_wrap (dataset, s3_tid, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, s3);
+
+ /*
+ * Display the field. Data is read directly into array of 'float'.
+ */
+ System.out.println ();
+ System.out.println ("Field b :");
+ for( i = 0; i < LENGTH; i++) {
+ System.out.print (s3[i]+" ");
+ }
+ System.out.println ();
+
+ /*
+ * Release resources
+ */
+ H5Tclose_wrap (s2_tid);
+ H5Tclose_wrap (s3_tid);
+ H5Dclose_wrap (dataset);
+ H5Fclose_wrap (file);
+ }
+
+
+ // Help function for creating a new file
+ public static int H5Fcreate_wrap (String name, int flags,
+ int create_id, int access_id)
+ {
+ int file_id = -1; // file identifier
+ try
+ {
+ // Create a new file using default file properties.
+ file_id = H5.H5Fcreate (name, flags, create_id, access_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Compound.H5Fcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Compound.H5Fcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return file_id;
+ }
+
+
+ // Help function for adding another member to the compound
+ // datatype datatype_id.
+ public static int H5Tinsert_wrap (int type_id, String name,
+ long offset, int field_id)
+ {
+ int status = -1;
+ try
+ {
+ // Adding another member to the compound datatype datatype_id.
+ status = H5.H5Tinsert (type_id, name, offset, field_id);
+
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Compound.H5Tinsert_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Compound.H5Tinsert_wrap() with HDF5Exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for creating the memory data type.
+ public static int H5Tcreate_wrap (int dclass, int size)
+ {
+ int datatype_id = -1; // memory data type identifier
+ try
+ {
+ // Create the memory data type.
+ datatype_id = H5.H5Tcreate (dclass, size);
+
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Compound.H5Tcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Compound.H5Tcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return datatype_id;
+ }
+
+
+ // Help function for opening an existing file
+ public static int H5Fopen_wrap (String name, int flags, int access_id)
+ {
+ int file_id = -1; // file identifier
+ try
+ {
+ // Open an existing file using default file access properties.
+ file_id = H5.H5Fopen (name, flags, access_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Compound.H5Fopen_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Compound.H5Fopen_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return file_id;
+ }
+
+
+ // Help function for opening an existing dataset
+ public static int H5Dopen_wrap (int loc_id, String name)
+ {
+ int dataset_id = -1; // dataset identifier
+
+ try
+ {
+ // Opening an existing dataset
+ dataset_id = H5.H5Dopen (loc_id, name);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Compound.H5Dopen_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Compound.H5Dopen_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataset_id;
+ }
+
+
+ // Help function for creating a new simple dataspace and opening it
+ // for access
+ public static int H5Screate_simple_wrap (int rank, long dims[],
+ long maxdims[])
+ {
+ int dataspace_id = -1; // dataspace identifier
+
+ try
+ {
+ // Create the data space for the dataset.
+ dataspace_id = H5.H5Screate_simple (rank, dims, maxdims);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Compound.H5Screate_simple_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Compound.H5Screate_simple_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataspace_id;
+ }
+
+
+ // Help function for creating a dataset
+ public static int H5Dcreate_wrap (int loc_id, String name, int type_id,
+ int space_id, int create_plist_id)
+ {
+ int dataset_id = -1; // dataset identifier
+
+ try
+ {
+ // Create the dataset
+ dataset_id = H5.H5Dcreate (loc_id, name, type_id, space_id,
+ create_plist_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Compound.H5Dcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Compound.H5Dcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataset_id;
+ }
+
+
+ // Help function for writing the dataset
+ public static int H5Dwrite_wrap (int dataset_id, int mem_type_id,
+ int mem_space_id, int file_space_id,
+ int xfer_plist_id, Object buf)
+ {
+ int status = -1;
+
+ try
+ {
+ // Write the dataset.
+ status = H5.H5Dwrite (dataset_id, mem_type_id, mem_space_id,
+ file_space_id, xfer_plist_id, buf);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Compound.H5Dwrite_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Compound.H5Dwrite_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for reading the dataset
+ public static int H5Dread_wrap (int dataset_id, int mem_type_id,
+ int mem_space_id, int file_space_id,
+ int xfer_plist_id, Object obj)
+ {
+ int status = -1;
+
+ try
+ {
+ // Read the dataset.
+ status = H5.H5Dread (dataset_id, mem_type_id, mem_space_id,
+ file_space_id, xfer_plist_id, obj);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Compound.H5Dread_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Compound.H5Dread_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+
+ // Help function for terminating access to the data space.
+ public static int H5Sclose_wrap (int dataspace_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the data space.
+ status = H5.H5Sclose (dataspace_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Compound.H5Sclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Compound.H5Sclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for releasing a datatype.
+ public static int H5Tclose_wrap (int type_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Releasing a datatype.
+ status = H5.H5Tclose (type_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Compound.H5Tclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Compound.H5Tclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for ending access to the dataset and releasing
+ // resources used by it.
+ public static int H5Dclose_wrap (int dataset_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // End access to the dataset and release resources used by it.
+ status = H5.H5Dclose (dataset_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Compound.H5Dclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Compound.H5Dclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for terminating access to the file.
+ public static int H5Fclose_wrap (int file_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the file.
+ status = H5.H5Fclose (file_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Compound.H5Fclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Compound.H5Fclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+}
diff --git a/doc/html/Tutor/examples/java/Copy.java b/doc/html/Tutor/examples/java/Copy.java
new file mode 100644
index 0000000..f174210
--- /dev/null
+++ b/doc/html/Tutor/examples/java/Copy.java
@@ -0,0 +1,541 @@
+/******************************************************************
+ * Copy.java (for HDF5 tutorial lesson 13)
+ *
+ * -- Showing how to use the H5Scopy function.
+ * (a java conversion from h5_copy.c)
+ *
+ ******************************************************************/
+
+import ncsa.hdf.hdf5lib.*;
+import ncsa.hdf.hdf5lib.exceptions.*;
+
+public class Copy
+{
+ public static void main (String []argv)
+ {
+ final String FILE1 = "copy1.h5";
+ final String FILE2 = "copy2.h5";
+
+ final int RANK = 2;
+ final int DIM1 = 3;
+ final int DIM2 = 4;
+ final int NUMP = 2;
+
+ int file1, file2, dataset1, dataset2;
+ int mid1, mid2, fid1, fid2;
+ long[] fdim = new long[2];
+ fdim[0] = DIM1;
+ fdim[1] = DIM2;
+ long[] mdim = new long[2];
+ mdim[0] = DIM1;
+ mdim[1] = DIM2;
+
+ long[] start = new long[2];
+ long[] stride = new long[2];
+ long[] count = new long[2];
+ long[] block = new long[2];
+
+ int[][] buf1 = new int[DIM1][DIM2];
+ int[][] buf2 = new int[DIM1][DIM2];
+ int[][] bufnew = new int[DIM1][DIM2];
+
+ int[] val = new int[2];
+ val[0] = 53;
+ val[1] = 59;
+
+ long[] marray = {2};
+ long[][] coord = new long[NUMP][RANK];
+ int ret;
+ int i, j;
+
+
+/***********************************************************************/
+/* */
+/* Create two files containing identical datasets. Write 0's to one */
+/* and 1's to the other. */
+/* */
+/***********************************************************************/
+
+ for ( i = 0; i < DIM1; i++ )
+ for ( j = 0; j < DIM2; j++ )
+ buf1[i][j] = 0;
+
+ for ( i = 0; i < DIM1; i++ )
+ for ( j = 0; j < DIM2; j++ )
+ buf2[i][j] = 1;
+
+ file1 = H5Fcreate_wrap (FILE1, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ file2 = H5Fcreate_wrap (FILE2, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ fid1 = H5Screate_simple_wrap (RANK, fdim, null);
+ fid2 = H5Screate_simple_wrap (RANK, fdim, null);
+
+ dataset1 = H5Dcreate_wrap
+ (file1, "Copy1", H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT), fid1,
+ HDF5Constants.H5P_DEFAULT);
+
+ dataset2 = H5Dcreate_wrap
+ (file2, "Copy2", H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT), fid2,
+ HDF5Constants.H5P_DEFAULT);
+
+
+ ret = H5Dwrite_wrap (dataset1, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, buf1);
+
+ ret = H5Dwrite_wrap (dataset2, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, buf2);
+
+ ret = H5Dclose_wrap (dataset1);
+ ret = H5Dclose_wrap (dataset2);
+
+ ret = H5Sclose_wrap (fid1);
+ ret = H5Sclose_wrap (fid2);
+
+ ret = H5Fclose_wrap (file1);
+ ret = H5Fclose_wrap (file2);
+
+
+/***********************************************************************/
+/* */
+/* Open the two files. Select two points in one file, write values to */
+/* those point locations, then do H5Scopy and write the values to the */
+/* other file. Close files. */
+/* */
+/***********************************************************************/
+
+ file1 = H5Fopen_wrap (FILE1, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+
+ file2 = H5Fopen_wrap (FILE2, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+
+ dataset1 = H5Dopen_wrap (file1, "Copy1");
+ dataset2 = H5Dopen_wrap (file2, "Copy2");
+
+ fid1 = H5Dget_space_wrap (dataset1);
+ mid1 = H5Screate_simple_wrap (1, marray, null);
+
+ coord[0][0] = 0; coord[0][1] = 3;
+ coord[1][0] = 0; coord[1][1] = 1;
+
+ ret = H5Sselect_elements_wrap (fid1, HDF5Constants.H5S_SELECT_SET,
+ NUMP, coord);
+
+ ret = H5Dwrite_wrap (dataset1, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+ mid1, fid1, HDF5Constants.H5P_DEFAULT, val);
+
+ fid2 = H5Scopy_wrap (fid1);
+
+ ret = H5Dwrite_wrap (dataset2, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+ mid1, fid2, HDF5Constants.H5P_DEFAULT, val);
+
+ ret = H5Dclose_wrap (dataset1);
+ ret = H5Dclose_wrap (dataset2);
+ ret = H5Sclose_wrap (fid1);
+ ret = H5Sclose_wrap (fid2);
+ ret = H5Fclose_wrap (file1);
+ ret = H5Fclose_wrap (file2);
+ ret = H5Sclose_wrap (mid1);
+
+
+/***********************************************************************/
+/* */
+/* Open both files and print the contents of the datasets. */
+/* */
+/***********************************************************************/
+
+ file1 = H5Fopen_wrap (FILE1, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+ file2 = H5Fopen_wrap (FILE2, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+ dataset1 = H5Dopen_wrap (file1, "Copy1");
+ dataset2 = H5Dopen_wrap (file2, "Copy2");
+
+ ret = H5Dread_wrap (dataset1, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, bufnew);
+
+ System.out.println ("\nDataset 'Copy1' in file 'copy1.h5' contains: ");
+
+ for (i = 0;i < DIM1; i++)
+ {
+ for (j = 0;j < DIM2; j++)
+ System.out.print (bufnew[i][j]);
+ System.out.println ();
+ }
+
+ System.out.println ("\nDataset 'Copy2' in file 'copy2.h5' contains: ");
+
+ ret = H5Dread_wrap (dataset2, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, bufnew);
+
+ for (i = 0;i < DIM1; i++)
+ {
+ for (j = 0;j < DIM2; j++)
+ System.out.print (bufnew[i][j]);
+ System.out.println ();
+ }
+
+ ret = H5Dclose_wrap (dataset1);
+ ret = H5Dclose_wrap (dataset2);
+ ret = H5Fclose_wrap (file1);
+ ret = H5Fclose_wrap (file2);
+ }
+
+
+ // Help function for creating a new file
+ public static int H5Fcreate_wrap (String name, int flags,
+ int create_id, int access_id)
+ {
+ int file_id = -1; // file identifier
+ try
+ {
+ // Create a new file using default file properties.
+ file_id = H5.H5Fcreate (name, flags, create_id, access_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Copy.H5Fcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Copy.H5Fcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return file_id;
+ }
+
+
+ // Help function for opening an existing file
+ public static int H5Fopen_wrap (String name, int flags, int access_id)
+ {
+ int file_id = -1; // file identifier
+ try
+ {
+ // Open an existing file using default file access properties.
+ file_id = H5.H5Fopen (name, flags, access_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Copy.H5Fopen_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Copy.H5Fopen_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return file_id;
+ }
+
+
+ // Help function for opening an existing dataset
+ public static int H5Dopen_wrap (int loc_id, String name)
+ {
+ int dataset_id = -1; // dataset identifier
+
+ try
+ {
+ // Opening an existing dataset
+ dataset_id = H5.H5Dopen (loc_id, name);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Copy.H5Dopen_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Copy.H5Dopen_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataset_id;
+ }
+
+
+ // Help function for creating a new simple dataspace and opening it
+ // for access
+ public static int H5Screate_simple_wrap (int rank, long dims[],
+ long maxdims[])
+ {
+ int dataspace_id = -1; // dataspace identifier
+
+ try
+ {
+ // Create the data space for the dataset.
+ dataspace_id = H5.H5Screate_simple (rank, dims, maxdims);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Copy.H5Screate_simple_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Copy.H5Screate_simple_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataspace_id;
+ }
+
+
+ // Help function for getting an identifier for a copy of
+ // the dataspace for a dataset
+ public static int H5Dget_space_wrap (int dataset_id)
+ {
+ int dataspace_id = -1;
+
+ try
+ {
+ // Returning an identifier for a copy of the dataspace for a dataset
+ dataspace_id = H5.H5Dget_space (dataset_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Copy.H5Dget_space_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Copy.H5Dget_space_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataspace_id;
+ }
+
+
+ // Help function for selecting array elements to be included in
+ // the selection for the space_id dataspace.
+ public static int H5Sselect_elements_wrap (int space_id, int op,
+ int num_elements,
+ long coord2D[][])
+ {
+ int status = -1;
+
+ try
+ {
+ status = H5.H5Sselect_elements (space_id, op, num_elements,
+ coord2D);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Copy.H5Sselect_elements_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Copy.H5Sselect_elements_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for creating a new dataspace which is an exact
+ // copy of the dataspace identified by space_id.
+ public static int H5Scopy_wrap (int space_id)
+ {
+ int dataspace_id = -1;
+
+ try
+ {
+ dataspace_id = H5.H5Scopy(space_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println ("Copy.H5Scopy_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println ("Copy.H5Scopy_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataspace_id;
+ }
+
+
+ // Help function for creating a dataset
+ public static int H5Dcreate_wrap (int loc_id, String name, int type_id,
+ int space_id, int create_plist_id)
+ {
+ int dataset_id = -1; // dataset identifier
+
+ try
+ {
+ // Create the dataset
+ dataset_id = H5.H5Dcreate (loc_id, name, type_id, space_id,
+ create_plist_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Copy.H5Dcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Copy.H5Dcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataset_id;
+ }
+
+
+ // Help function for writing the dataset
+ public static int H5Dwrite_wrap (int dataset_id, int mem_type_id,
+ int mem_space_id, int file_space_id,
+ int xfer_plist_id, Object buf)
+ {
+ int status = -1;
+
+ try
+ {
+ // Write the dataset.
+ status = H5.H5Dwrite (dataset_id, mem_type_id, mem_space_id,
+ file_space_id, xfer_plist_id, buf);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Copy.H5Dwrite_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Copy.H5Dwrite_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for reading the dataset
+ public static int H5Dread_wrap (int dataset_id, int mem_type_id,
+ int mem_space_id, int file_space_id,
+ int xfer_plist_id, Object obj)
+ {
+ int status = -1;
+
+ try
+ {
+ // Read the dataset.
+ status = H5.H5Dread (dataset_id, mem_type_id, mem_space_id,
+ file_space_id, xfer_plist_id, obj);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Copy.H5Dread_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Copy.H5Dread_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for terminating access to the data space.
+ public static int H5Sclose_wrap (int dataspace_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the data space.
+ status = H5.H5Sclose (dataspace_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Copy.H5Sclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Copy.H5Sclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for ending access to the dataset and releasing
+ // resources used by it.
+ public static int H5Dclose_wrap (int dataset_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // End access to the dataset and release resources used by it.
+ status = H5.H5Dclose (dataset_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Copy.H5Dclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Copy.H5Dclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for terminating access to the file.
+ public static int H5Fclose_wrap (int file_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the file.
+ status = H5.H5Fclose (file_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("Copy.H5Fclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("Copy.H5Fclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+}
diff --git a/doc/html/Tutor/examples/java/CreateAttribute.java b/doc/html/Tutor/examples/java/CreateAttribute.java
new file mode 100644
index 0000000..c926422
--- /dev/null
+++ b/doc/html/Tutor/examples/java/CreateAttribute.java
@@ -0,0 +1,302 @@
+/******************************************************************
+ * CreateAttribute.java (for HDF5 tutorial lesson 7)
+ *
+ * -- Creating and Writing a dataset attribute
+ * (a java conversion from h5_crtatt.c)
+ *
+ ******************************************************************/
+
+import ncsa.hdf.hdf5lib.*;
+import ncsa.hdf.hdf5lib.exceptions.*;
+
+public class CreateAttribute
+{
+ public static void main(String []argv)
+ {
+ final String FILE = "dset.h5";
+ int file_id = -1; // file identifier
+ int dataset_id = -1; // dataset identifier
+ int attribute_id = -1;
+ int dataspace_id = -1; // dataspace identifier
+ long[] dims = new long[1];
+ int[] attr_data = new int[2];
+ int status = -1;
+
+ // Initialize the attribute data.
+ attr_data[0] = 100;
+ attr_data[1] = 200;
+
+ // Open an existing file.
+ file_id = H5Fopen_wrap (FILE, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Open an existing dataset.
+ dataset_id = H5Dopen_wrap (file_id, "/dset");
+
+ // Create the data space for the attribute.
+ dims[0] = 2;
+ dataspace_id = H5Screate_simple_wrap (1, dims, null);
+
+ // Create a dataset attribute.
+ attribute_id = H5Acreate_wrap
+ (dataset_id, "attr",
+ H5.J2C (HDF5CDataTypes.JH5T_STD_I32BE),
+ dataspace_id, HDF5Constants.H5P_DEFAULT);
+
+ // Write the attribute data.
+ status = H5Awrite_wrap
+ (attribute_id,
+ H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+ attr_data);
+
+ // Close the attribute.
+ status = H5Aclose_wrap (attribute_id);
+
+ // Close the dataspace.
+ status = H5Sclose_wrap (dataspace_id);
+
+ // Close the dataset.
+ status = H5Dclose_wrap (dataset_id);
+
+ // Close the file.
+ status = H5Fclose_wrap (file_id);
+ }
+
+
+ // Help function for opening an existing file
+ public static int H5Fopen_wrap (String name, int flags, int access_id)
+ {
+ int file_id = -1; // file identifier
+ try
+ {
+ // Open an existing file.
+ file_id = H5.H5Fopen (name, flags, access_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Fopen_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Fopen_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return file_id;
+ }
+
+
+ // Help function for opening an existing dataset
+ public static int H5Dopen_wrap (int loc_id, String name)
+ {
+ int dataset_id = -1; // dataset identifier
+
+ try
+ {
+ // Opening an existing dataset
+ dataset_id = H5.H5Dopen (loc_id, name);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Dopen_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Dopen_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataset_id;
+ }
+
+
+ // Create the data space for the attribute.
+ public static int H5Screate_simple_wrap (int rank, long dims[],
+ long maxdims[])
+ {
+ int dataspace_id = -1; // dataspace identifier
+
+ try
+ {
+ // Create the data space for the dataset.
+ dataspace_id = H5.H5Screate_simple (rank, dims, maxdims);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Screate_simple_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Screate_simple_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataspace_id;
+ }
+
+
+ // Help function for creating a dataset attribute.
+ public static int H5Acreate_wrap (int loc_id, String name, int type_id,
+ int space_id, int create_plist)
+ {
+ int attribute_id = -1; // attribute identifier
+
+ try
+ {
+ // Create the attribute
+ attribute_id = H5.H5Acreate (loc_id, name, type_id, space_id,
+ create_plist);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Acreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Acreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return attribute_id;
+ }
+
+
+ // Help function for writing the attribute data.
+ public static int H5Awrite_wrap (int attr_id, int mem_type_id,
+ Object buf)
+ {
+ int status = -1;
+
+ try
+ {
+ // Write the attribute data.
+ status = H5.H5Awrite (attr_id, mem_type_id, buf);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Awrite_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Awrite_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for closing the attribute
+ public static int H5Aclose_wrap (int attribute_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Close the attribute
+ status = H5.H5Aclose (attribute_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Aclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Aclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for closing the dataset
+ public static int H5Dclose_wrap (int dataset_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Close the dataset
+ status = H5.H5Dclose (dataset_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Dclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Dclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for closing the dataspace
+ public static int H5Sclose_wrap (int dataspace_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the data space.
+ status = H5.H5Sclose (dataspace_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Sclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Sclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for terminating access to the file.
+ public static int H5Fclose_wrap (int file_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the file.
+ status = H5.H5Fclose (file_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Fclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateAttribute.H5Fclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+}
diff --git a/doc/html/Tutor/examples/java/CreateDataset.java b/doc/html/Tutor/examples/java/CreateDataset.java
new file mode 100644
index 0000000..05f3f6b
--- /dev/null
+++ b/doc/html/Tutor/examples/java/CreateDataset.java
@@ -0,0 +1,210 @@
+/******************************************************************
+ * CreateDataset.java (for HDF5 tutorial lesson 5)
+ *
+ * -- Creating an HDF5 Dataset
+ * (a java conversion from h5_crtdat.c)
+ *
+ ******************************************************************/
+
+import ncsa.hdf.hdf5lib.*;
+import ncsa.hdf.hdf5lib.exceptions.*;
+
+public class CreateDataset
+{
+ public static void main(String []argv)
+ {
+ final String FILE = "dset.h5";
+ int file_id = -1; // file identifier
+ int dataset_id = -1; // dataset identifier
+ int dataspace_id = -1; // dataspace identifier
+ long[] dims = new long[2];
+ int status = -1;
+
+ // Create a new file using default properties.
+ file_id = H5Fcreate_wrap (FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Create the data space for the dataset.
+ dims[0] = 4;
+ dims[1] = 6;
+ dataspace_id = H5Screate_simple_wrap (2, dims, null);
+
+ // Create the dataset.
+ dataset_id =
+ H5Dcreate_wrap (file_id, "/dset",
+ H5.J2C (HDF5CDataTypes.JH5T_STD_I32BE),
+ dataspace_id, HDF5Constants.H5P_DEFAULT);
+
+ // End access to the dataset and release resources used by it.
+ status = H5Dclose_wrap (dataset_id);
+
+ // Terminate access to the data space.
+ status = H5Sclose_wrap (dataspace_id);
+
+ // Close the file.
+ status = H5Fclose_wrap (file_id);
+ }
+
+
+ // Help function for creating a new file
+ public static int H5Fcreate_wrap (String name, int flags,
+ int create_id, int access_id)
+ {
+ int file_id = -1; // file identifier
+ try
+ {
+ // Create a new file using default file properties.
+ file_id = H5.H5Fcreate (name, flags, create_id, access_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateDataset.H5Fcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateDataset.H5Fcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return file_id;
+ }
+
+
+ // Help function for creating a new simple dataspace and opening it
+ // for access
+ public static int H5Screate_simple_wrap (int rank, long dims[],
+ long maxdims[])
+ {
+ int dataspace_id = -1; // dataspace identifier
+
+ try
+ {
+ // Create the data space for the dataset.
+ dataspace_id = H5.H5Screate_simple (rank, dims, maxdims);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateDataset.H5Screate_simple_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateDataset.H5Screate_simple_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataspace_id;
+ }
+
+
+ // Help function for creating a dataset
+ public static int H5Dcreate_wrap (int loc_id, String name, int type_id,
+ int space_id, int create_plist_id)
+ {
+ int dataset_id = -1; // dataset identifier
+
+ try
+ {
+ // Create the dataset
+ dataset_id = H5.H5Dcreate (loc_id, name, type_id, space_id,
+ create_plist_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateDataset.H5Dcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateDataset.H5Dcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataset_id;
+ }
+
+
+ // Help function for ending access to the dataset and releasing
+ // resources used by it.
+ public static int H5Dclose_wrap (int dataset_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // End access to the dataset and release resources used by it.
+ status = H5.H5Dclose (dataset_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateDataset.H5Dclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateDataset.H5Dclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for terminating access to the data space.
+ public static int H5Sclose_wrap (int dataspace_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the data space.
+ status = H5.H5Sclose (dataspace_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateDataset.H5Sclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateDataset.H5Sclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for terminating access to the file.
+ public static int H5Fclose_wrap (int file_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the file.
+ status = H5.H5Fclose (file_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateDataset.H5Fclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateDataset.H5Fclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+}
+
diff --git a/doc/html/Tutor/examples/java/CreateFile.java b/doc/html/Tutor/examples/java/CreateFile.java
new file mode 100644
index 0000000..550b263
--- /dev/null
+++ b/doc/html/Tutor/examples/java/CreateFile.java
@@ -0,0 +1,83 @@
+/******************************************************************
+ * CreateFile.java (for HDF5 tutorial lesson 4)
+ *
+ * -- Creating an HDF5 file
+ * (a java conversion from h5_crtfile.c)
+ *
+ ******************************************************************/
+
+import ncsa.hdf.hdf5lib.*;
+import ncsa.hdf.hdf5lib.exceptions.*;
+
+public class CreateFile
+{
+ public static void main(String []argv)
+ {
+ final String FILE = "file.h5";
+ int file_id = -1; // file identifier
+ int status = -1;
+
+ file_id = H5Fcreate_wrap (FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ status = H5Fclose_wrap (file_id);
+ }
+
+
+ // Help function for creating a new file
+ public static int H5Fcreate_wrap (String name, int flags,
+ int create_id, int access_id)
+ {
+ int file_id = -1; // file identifier
+ try
+ {
+ // Create a new file using default file properties.
+ file_id = H5.H5Fcreate (name, flags, create_id, access_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateFile.H5Fcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateFile.H5Fcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+
+ System.out.println ("\nThe file name is: " + name);
+ System.out.println ("The file ID is: " + file_id);
+
+ return file_id;
+ }
+
+
+ // Help function for terminating access to the file.
+ public static int H5Fclose_wrap (int file_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the file.
+ status = H5.H5Fclose (file_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateFile.H5Fclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateFile.H5Fclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+}
+
+
diff --git a/doc/html/Tutor/examples/java/CreateFileInput.java b/doc/html/Tutor/examples/java/CreateFileInput.java
new file mode 100644
index 0000000..0e7fd4d
--- /dev/null
+++ b/doc/html/Tutor/examples/java/CreateFileInput.java
@@ -0,0 +1,118 @@
+/******************************************************************
+ * CreateFileInput.java (for HDF5 tutorial Lesson 4)
+ *
+ * -- Creating an HDF5 file
+ * (another java conversion from h5_crtfile.c; gives the user two options:
+ * one for the library path and one for the file name. If no file name
+ * is given, the default name is used.)
+ *
+ ******************************************************************/
+
+import java.lang.System;
+import java.util.*;
+import ncsa.hdf.hdf5lib.*;
+import ncsa.hdf.hdf5lib.exceptions.*;
+
+public class CreateFileInput
+{
+ // An example run command:
+ // "./runCreateFileInput -l /usr/lib/hdf5.dll -f ./open.h5"
+ public static void main(String []argv)
+ {
+ int file_id = -1; // file identifier
+ int status = -1;
+ String libpath = null;
+ String filename = null;
+
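+ // Parse the command-line options: -l <HDF5 library path>, -f <file name>.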
+ for (int i = 0; i < argv.length; i++)
+ {
+ if ("-l".equalsIgnoreCase (argv[i]))
+ libpath = argv[++i];
+
+ if ("-f".equalsIgnoreCase (argv[i]))
+ filename = argv[++i];
+ }
+
+ if (libpath != null)
+ {
+ Properties pros = System.getProperties ();
+ pros.put (H5.H5PATH_PROPERTY_KEY, libpath);
+
+ /*
+ this function call could be used in Java 1.2
+ System.setProperty (H5.H5PATH_PROPERTY_KEY, libpath);
+ */
+ }
+
+ if (filename == null)
+ {
+ filename = "file.h5"; // if no input file name, use the default name
+ }
+
+ file_id = H5Fcreate_wrap (filename,
+ HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ status = H5Fclose_wrap (filename, file_id);
+ }
+
+
+ // Help function for creating a new file
+ public static int H5Fcreate_wrap (String name, int flags,
+ int create_id, int access_id)
+ {
+ int file_id = -1; // file identifier
+ try
+ {
+ // Create a new file using default file properties.
+ file_id = H5.H5Fcreate (name, flags, create_id, access_id);
+
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateFileInput.H5Fcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateFileInput.H5Fcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+
+ System.out.println ("\nThe file name is: " + name);
+ System.out.println ("The file ID is: " + file_id);
+
+ return file_id;
+ }
+
+
+ // Help function for terminating access to the file.
+ public static int H5Fclose_wrap (String name, int file_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the file.
+ status = H5.H5Fclose (file_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateFileInput.H5Fclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateFileInput.H5Fclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+
+ return status;
+ }
+}
+
+
diff --git a/doc/html/Tutor/examples/java/CreateGroup.java b/doc/html/Tutor/examples/java/CreateGroup.java
new file mode 100644
index 0000000..48ef4af
--- /dev/null
+++ b/doc/html/Tutor/examples/java/CreateGroup.java
@@ -0,0 +1,139 @@
+/******************************************************************
+ * CreateGroup.java (for HDF5 tutorial lesson 8)
+ *
+ * -- Creating and closing a group
+ * (a java conversion from h5_crtgrp.c)
+ *
+ ******************************************************************/
+
+import ncsa.hdf.hdf5lib.*;
+import ncsa.hdf.hdf5lib.exceptions.*;
+
+public class CreateGroup
+{
+ public static void main(String []argv)
+ {
+ final String FILE = "group.h5";
+ int file_id = -1; // file identifier
+ int group_id = -1; // group identifier
+ int status = -1;
+
+ // Create a new file using default properties.
+ file_id = H5Fcreate_wrap (FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Create a group named "/MyGroup" in the file.
+ group_id = H5Gcreate_wrap (file_id, "/MyGroup", 0);
+
+ // Close the group.
+ status = H5Gclose_wrap (group_id);
+
+ // Close the file.
+ status = H5Fclose_wrap (file_id);
+ }
+
+
+ // Help function for creating a new file
+ public static int H5Fcreate_wrap (String name, int flags,
+ int create_id, int access_id)
+ {
+ int file_id = -1; // file identifier
+ try
+ {
+ // Create a new file using default file properties.
+ file_id = H5.H5Fcreate (name, flags, create_id, access_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroup.H5Fcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroup.H5Fcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return file_id;
+ }
+
+
+ // Help function for creating a group named "/MyGroup" in the file.
+ public static int H5Gcreate_wrap (int loc_id, String name, int size_hint)
+ {
+ int group_id = -1; // group identifier
+ try
+ {
+ // Create a group
+ group_id = H5.H5Gcreate (loc_id, name, size_hint);
+
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroup.H5Gcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroup.H5Gcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return group_id;
+ }
+
+
+ // Help function for closing the group
+ public static int H5Gclose_wrap (int group_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Close the group
+ status = H5.H5Gclose (group_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroup.H5Gclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroup.H5Gclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for terminating access to the file.
+ public static int H5Fclose_wrap (int file_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the file.
+ status = H5.H5Fclose (file_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroup.H5Fclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroup.H5Fclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+}
diff --git a/doc/html/Tutor/examples/java/CreateGroupAR.java b/doc/html/Tutor/examples/java/CreateGroupAR.java
new file mode 100644
index 0000000..672f1d1
--- /dev/null
+++ b/doc/html/Tutor/examples/java/CreateGroupAR.java
@@ -0,0 +1,152 @@
+/******************************************************************
+ * CreateGroupAR.java (for HDF5 tutorial lesson 9)
+ *
+ * -- Creating groups using absolute and relative names.
+ * (a java conversion from h5_crtgrpar.c)
+ *
+ ******************************************************************/
+
+import ncsa.hdf.hdf5lib.*;
+import ncsa.hdf.hdf5lib.exceptions.*;
+
+public class CreateGroupAR
+{
+ public static void main(String []argv)
+ {
+ final String FILE = "groups.h5";
+ int file_id = -1; // file identifier
+ int group1_id = -1; // group identifier
+ int group2_id = -1;
+ int group3_id = -1;
+
+ int status = -1;
+
+ // Create a new file using default properties.
+ file_id = H5Fcreate_wrap (FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Create group "MyGroup" in the root group using absolute name.
+ group1_id = H5Gcreate_wrap (file_id, "/MyGroup", 0);
+
+
+ // Create group "Group_A" in group "MyGroup" using absolute name.
+ group2_id = H5Gcreate_wrap (file_id, "/MyGroup/Group_A", 0);
+
+ // Create group "Group_B" in group "MyGroup" using relative name.
+ group3_id = H5Gcreate_wrap (group1_id, "Group_B", 0);
+
+ // Close groups.
+ status = H5Gclose_wrap (group1_id);
+ status = H5Gclose_wrap (group2_id);
+ status = H5Gclose_wrap (group3_id);
+
+ // Close the file.
+ status = H5Fclose_wrap (file_id);
+ }
+
+
+ // Help function for creating a new file
+ public static int H5Fcreate_wrap (String name, int flags,
+ int create_id, int access_id)
+ {
+ int file_id = -1; // file identifier
+ try
+ {
+ // Create a new file using default file properties.
+ file_id = H5.H5Fcreate (name, flags, create_id, access_id);
+
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroupAR.H5Fcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroupAR.H5Fcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return file_id;
+ }
+
+
+ // Help function for creating a group in the file.
+ public static int H5Gcreate_wrap (int loc_id, String name, int size_hint)
+ {
+ int group_id = -1; // group identifier
+ try
+ {
+ // Create a group
+ group_id = H5.H5Gcreate (loc_id, name, size_hint);
+
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroupAR.H5Gcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroupAR.H5Gcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return group_id;
+ }
+
+
+ // Help function for closing the group
+ public static int H5Gclose_wrap (int group_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Close the group
+ status = H5.H5Gclose (group_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroupAR.H5Gclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroupAR.H5Gclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for terminating access to the file.
+ public static int H5Fclose_wrap (int file_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the file.
+ status = H5.H5Fclose (file_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroupAR.H5Fclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroupAR.H5Fclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+}
diff --git a/doc/html/Tutor/examples/java/CreateGroupDataset.java b/doc/html/Tutor/examples/java/CreateGroupDataset.java
new file mode 100644
index 0000000..f0fbeaa
--- /dev/null
+++ b/doc/html/Tutor/examples/java/CreateGroupDataset.java
@@ -0,0 +1,340 @@
+/******************************************************************
+ * CreateGroupDataset.java (for HDF5 tutorial lesson 10)
+ *
+ * -- Creating a dataset in a particular group
+ * (a java conversion from h5_crtgrpd.c)
+ *
+ ******************************************************************/
+
+import ncsa.hdf.hdf5lib.*;
+import ncsa.hdf.hdf5lib.exceptions.*;
+
+public class CreateGroupDataset
+{
+ public static void main(String []argv)
+ {
+ final String FILE = "groups.h5";
+ int file_id = -1; // file identifier
+ int group_id = -1; // group identifier
+ int dataset_id;
+ int dataspace_id;
+ int status = -1;
+
+ long[] dims = new long[2];
+ int[][] dset1_data = new int[3][3];
+ int[][] dset2_data = new int[2][10];
+ int i = -1, j = -1;
+
+ // Initialize the first dataset.
+ for (i = 0; i < 3; i++)
+ for (j = 0; j < 3; j++)
+ dset1_data[i][j] = j + 1;
+
+ // Initialize the second dataset.
+ for (i = 0; i < 2; i++)
+ for (j = 0; j < 10; j++)
+ dset2_data[i][j] = j + 1;
+
+ // Open an existing file.
+ file_id = H5Fopen_wrap (FILE, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Create the data space for the first dataset.
+ dims[0] = 3;
+ dims[1] = 3;
+ dataspace_id = H5Screate_simple_wrap (2, dims, null);
+
+ // Create a dataset in group "MyGroup".
+ dataset_id =
+ H5Dcreate_wrap (file_id, "/MyGroup/dset1",
+ H5.J2C (HDF5CDataTypes.JH5T_STD_I32BE),
+ dataspace_id, HDF5Constants.H5P_DEFAULT);
+
+ // Write the first dataset.
+ status = H5Dwrite_wrap
+ (dataset_id,
+ H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset1_data);
+
+ // Close the data space for the first dataset.
+ status = H5Sclose_wrap (dataspace_id);
+
+ // Close the first dataset.
+ status = H5Dclose_wrap (dataset_id);
+
+ // Open an existing group of the specified file.
+ group_id = H5Gopen_wrap (file_id, "/MyGroup/Group_A");
+
+ // Create the data space for the second dataset.
+ dims[0] = 2;
+ dims[1] = 10;
+ dataspace_id = H5Screate_simple_wrap (2, dims, null);
+
+ // Create the second dataset in group "Group_A".
+ dataset_id =
+ H5Dcreate_wrap (group_id, "dset2",
+ H5.J2C (HDF5CDataTypes.JH5T_STD_I32BE),
+ dataspace_id, HDF5Constants.H5P_DEFAULT);
+
+ // Write the second dataset.
+ status = H5Dwrite_wrap
+ (dataset_id,
+ H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset2_data);
+
+ // Close the data space for the second dataset.
+ status = H5Sclose_wrap (dataspace_id);
+
+ // Close the second dataset
+ status = H5Dclose_wrap (dataset_id);
+
+ // Close the group.
+ status = H5Gclose_wrap (group_id);
+
+ // Close the file.
+ status = H5Fclose_wrap (file_id);
+ }
+
+
+ // Help function for opening an existing file
+ public static int H5Fopen_wrap (String name, int flags, int access_id)
+ {
+ int file_id = -1; // file identifier
+ try
+ {
+ // Open an existing file.
+ file_id = H5.H5Fopen (name, flags, access_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Fopen_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Fopen_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return file_id;
+ }
+
+
+ // Help function for creating a new simple dataspace and opening it
+ // for access
+ public static int H5Screate_simple_wrap (int rank, long dims[],
+ long maxdims[])
+ {
+ int dataspace_id = -1; // dataspace identifier
+
+ try
+ {
+ // Create the data space for the dataset.
+ dataspace_id = H5.H5Screate_simple (rank, dims, maxdims);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Screate_simple_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Screate_simple_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataspace_id;
+ }
+
+
+ // Help function for creating a dataset
+ public static int H5Dcreate_wrap (int loc_id, String name, int type_id,
+ int space_id, int create_plist_id)
+ {
+ int dataset_id = -1; // dataset identifier
+
+ try
+ {
+ // Create the dataset
+ dataset_id = H5.H5Dcreate (loc_id, name, type_id, space_id,
+ create_plist_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Dcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Dcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataset_id;
+ }
+
+
+ // Help function for writing the dataset
+ public static int H5Dwrite_wrap (int dataset_id, int mem_type_id,
+ int mem_space_id, int file_space_id,
+ int xfer_plist_id, Object buf)
+ {
+ int status = -1;
+
+ try
+ {
+ // Write the dataset.
+ status = H5.H5Dwrite (dataset_id, mem_type_id, mem_space_id,
+ file_space_id, xfer_plist_id, buf);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Dwrite_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Dwrite_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for terminating access to the data space.
+ public static int H5Sclose_wrap (int dataspace_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the data space.
+ status = H5.H5Sclose (dataspace_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Sclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Sclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for ending access to the dataset and releasing
+ // resources used by it.
+ public static int H5Dclose_wrap (int dataset_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // End access to the dataset and release resources used by it.
+ status = H5.H5Dclose (dataset_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Dclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Dclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for opening a group
+ public static int H5Gopen_wrap (int loc_id, String name)
+ {
+ int group_id = -1; // group identifier
+ try
+ {
+ // Open an existing group
+ group_id = H5.H5Gopen (loc_id, name);
+
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Gopen_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Gopen_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return group_id;
+ }
+
+
+ // Help function for closing the group
+ public static int H5Gclose_wrap (int group_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Close the group
+ status = H5.H5Gclose (group_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Gclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Gclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for terminating access to the file.
+ public static int H5Fclose_wrap (int file_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the file.
+ status = H5.H5Fclose (file_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Fclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("CreateGroupDataset.H5Fclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+}
diff --git a/doc/html/Tutor/examples/java/DatasetRdWt.java b/doc/html/Tutor/examples/java/DatasetRdWt.java
new file mode 100644
index 0000000..4c26d0f
--- /dev/null
+++ b/doc/html/Tutor/examples/java/DatasetRdWt.java
@@ -0,0 +1,213 @@
+/******************************************************************
+ * DatasetRdWt.java (for HDF5 tutorial lesson 6)
+ *
+ * -- Reading and Writing an existing Dataset
+ * (a java conversion from h5_rdwt.c)
+ *
+ ******************************************************************/
+
+import ncsa.hdf.hdf5lib.*;
+import ncsa.hdf.hdf5lib.exceptions.*;
+
+public class DatasetRdWt
+{
+ public static void main(String []argv)
+ {
+ final String FILE = "dset.h5";
+ int file_id = -1; // file identifier
+ int dataset_id = -1; // dataset identifier
+ int status = -1;
+ int[][] dset_data = new int[4][6];
+
+ // Initialize the dataset.
+ for (int i = 0; i < 4; i++)
+ for (int j = 0; j < 6; j++)
+ dset_data[i][j] = i * 6 + j + 1;
+
+ // Open an existing file
+ file_id = H5Fopen_wrap (FILE, HDF5Constants.H5F_ACC_RDWR,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Open an existing dataset.
+ dataset_id = H5Dopen_wrap (file_id, "/dset");
+
+ // Write the dataset.
+ status = H5Dwrite_wrap
+ (dataset_id, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+
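+ // Read the dataset back into dset_data.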
+ status = H5Dread_wrap
+ (dataset_id, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+
+ // Close the dataset.
+ status = H5Dclose_wrap (dataset_id);
+
+ // Close the file.
+ status = H5Fclose_wrap (file_id);
+ }
+
+
+ // Help function for opening an existing file
+ public static int H5Fopen_wrap (String name, int flags, int access_id)
+ {
+ int file_id = -1; // file identifier
+ try
+ {
+ // Open an existing file.
+ file_id = H5.H5Fopen (name, flags, access_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("DatasetRdWt.H5Fopen_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("DatasetRdWt.H5Fopen_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return file_id;
+ }
+
+
+ // Help function for opening an existing dataset
+ public static int H5Dopen_wrap (int loc_id, String name)
+ {
+ int dataset_id = -1; // dataset identifier
+
+ try
+ {
+ // Opening an existing dataset
+ dataset_id = H5.H5Dopen (loc_id, name);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("DatasetRdWt.H5Dopen_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("DatasetRdWt.H5Dopen_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataset_id;
+ }
+
+
+ // Help function for writing the dataset
+ public static int H5Dwrite_wrap (int dataset_id, int mem_type_id,
+ int mem_space_id, int file_space_id,
+ int xfer_plist_id, Object buf)
+ {
+ int status = -1;
+
+ try
+ {
+ // Write the dataset.
+ status = H5.H5Dwrite (dataset_id, mem_type_id, mem_space_id,
+ file_space_id, xfer_plist_id, buf);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("DatasetRdWt.H5Dwrite_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("DatasetRdWt.H5Dwrite_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for reading the dataset
+ public static int H5Dread_wrap (int dataset_id, int mem_type_id,
+ int mem_space_id, int file_space_id,
+ int xfer_plist_id, Object obj)
+ {
+ int status = -1;
+
+ try
+ {
+ // Read the dataset.
+ status = H5.H5Dread (dataset_id, mem_type_id, mem_space_id,
+ file_space_id, xfer_plist_id, obj);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("DatasetRdWt.H5Dread_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("DatasetRdWt.H5Dread_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for ending access to the dataset and releasing
+ // resources used by it.
+ public static int H5Dclose_wrap (int dataset_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // End access to the dataset and release resources used by it.
+ status = H5.H5Dclose (dataset_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("DatasetRdWt.H5Dclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("DatasetRdWt.H5Dclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for terminating access to the file.
+ public static int H5Fclose_wrap (int file_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the file.
+ status = H5.H5Fclose (file_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("DatasetRdWt.H5Fclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("DatasetRdWt.H5Fclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+}
diff --git a/doc/html/Tutor/examples/java/HyperSlab.java b/doc/html/Tutor/examples/java/HyperSlab.java
new file mode 100644
index 0000000..5f8818d
--- /dev/null
+++ b/doc/html/Tutor/examples/java/HyperSlab.java
@@ -0,0 +1,590 @@
+/******************************************************************
+ * HyperSlab.java (for HDF5 tutorial lesson 12)
+ *
+ * -- Writing and reading a hyperslab
+ * (a java conversion from h5_hyperslab.c)
+ *
+ ******************************************************************/
+
+import ncsa.hdf.hdf5lib.*;
+import ncsa.hdf.hdf5lib.exceptions.*;
+
+public class HyperSlab
+{
+ public static void main (String []argv)
+ {
+ final String FILE = "sds.h5";
+ final String DATASETNAME = "IntArray";
+ final int NX_SUB = 3; /* hyperslab dimensions */
+ final int NY_SUB = 4;
+ final int NX = 7; /* output buffer dimensions */
+ final int NY = 7;
+ final int NZ = 3;
+ final int RANK = 2;
+ final int RANK_OUT = 3;
+ final int X = 5; /* dataset dimensions */
+ final int Y = 6;
+
+ long[] dimsf = new long[2]; /* dataset dimensions */
+ int[][] data = new int[X][Y]; /* data to write */
+
+ /*
+ * Data and output buffer initialization.
+ */
+ int file, dataset; /* handles */
+ int dataspace;
+ int memspace;
+ long[] dimsm = new long[3]; /* memory space dimensions */
+ long[] dims_out = new long[2]; /* dataset dimensions */
+ int status;
+
+ int[][][] data_out = new int[NX][NY][NZ]; /* output buffer */
+
+ long[] count = new long[2]; /* size of the hyperslab in the file */
+ long[] offset = new long[2]; /* hyperslab offset in the file */
+ long[] count_out = new long[3]; /* size of the hyperslab in memory */
+ long[] offset_out = new long[3]; /* hyperslab offset in memory */
+ int i, j, k, status_n, rank;
+
+ /*********************************************************
+ This writes data to the HDF5 file.
+ *********************************************************/
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < X; j++)
+ {
+ for (i = 0; i < Y; i++)
+ data[j][i] = i + j;
+ }
+ /*
+ * 0 1 2 3 4 5
+ * 1 2 3 4 5 6
+ * 2 3 4 5 6 7
+ * 3 4 5 6 7 8
+ * 4 5 6 7 8 9
+ */
+
+ /*
+ * Create a new file using H5F_ACC_TRUNC access,
+ * the default file creation properties, and the default file
+ * access properties.
+ */
+ file = H5Fcreate_wrap (FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ /*
+ * Describe the size of the array and create the data space for fixed
+ * size dataset.
+ */
+ dimsf[0] = X;
+ dimsf[1] = Y;
+ dataspace = H5Screate_simple_wrap (RANK, dimsf, null);
+
+ /*
+ * Create a new dataset within the file using defined dataspace and
+ * default dataset creation properties.
+ */
+ dataset = H5Dcreate_wrap
+ (file, DATASETNAME, H5.J2C (HDF5CDataTypes.JH5T_STD_I32BE),
+ dataspace, HDF5Constants.H5P_DEFAULT);
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ status = H5Dwrite_wrap
+ (dataset, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, data);
+
+ /*
+ * Close/release resources.
+ */
+ H5Sclose_wrap (dataspace);
+ H5Dclose_wrap (dataset);
+ H5Fclose_wrap (file);
+
+ /*************************************************************
+
+ This reads the hyperslab from the sds.h5 file just
+ created, into a 2-dimensional plane of the 3-dimensional
+ array.
+
+ ************************************************************/
+
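+ /* Initialize the output buffer to zero. */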
+ for (j = 0; j < NX; j++)
+ {
+ for (i = 0; i < NY; i++)
+ {
+ for (k = 0; k < NZ; k++)
+ data_out[j][i][k] = 0;
+ }
+ }
+
+ /*
+ * Open the file and the dataset.
+ */
+ file = H5Fopen_wrap (FILE, HDF5Constants.H5F_ACC_RDONLY,
+ HDF5Constants.H5P_DEFAULT);
+ dataset = H5Dopen_wrap (file, DATASETNAME);
+
+ dataspace = H5Dget_space_wrap (dataset); /* dataspace handle */
+ rank = H5Sget_simple_extent_ndims_wrap (dataspace);
+ status_n = H5Sget_simple_extent_dims_wrap (dataspace, dims_out, null);
+
+ System.out.println ("Rank: " + rank);
+ System.out.println ("Dimensions: "+ dims_out[0] + " x " + dims_out[1]);
+
+ /*
+ * Define hyperslab in the dataset.
+ */
+ offset[0] = 1;
+ offset[1] = 2;
+ count[0] = NX_SUB;
+ count[1] = NY_SUB;
+ status = H5Sselect_hyperslab_wrap (dataspace,
+ HDF5Constants.H5S_SELECT_SET,
+ offset, null, count, null);
+
+ /*
+ * Define the memory dataspace.
+ */
+ dimsm[0] = NX;
+ dimsm[1] = NY;
+ dimsm[2] = NZ;
+ memspace = H5Screate_simple_wrap (RANK_OUT, dimsm, null);
+
+ /*
+ * Define memory hyperslab.
+ */
+ offset_out[0] = 3;
+ offset_out[1] = 0;
+ offset_out[2] = 0;
+ count_out[0] = NX_SUB;
+ count_out[1] = NY_SUB;
+ count_out[2] = 1;
+ status = H5Sselect_hyperslab_wrap (memspace,
+ HDF5Constants.H5S_SELECT_SET,
+ offset_out, null, count_out, null);
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ status =
+ H5Dread_wrap (dataset, H5.J2C (HDF5CDataTypes.JH5T_NATIVE_INT),
+ memspace, dataspace, HDF5Constants.H5P_DEFAULT,
+ data_out);
+
+ System.out.println ("Data:");
+ for (j = 0; j < NX; j++)
+ {
+ for (i = 0; i < NY; i++)
+ System.out.print (data_out[j][i][0] + " ");
+ System.out.println ();
+ }
+ System.out.println ();
+
+ /*
+ * 0 0 0 0 0 0 0
+ * 0 0 0 0 0 0 0
+ * 0 0 0 0 0 0 0
+ * 3 4 5 6 0 0 0
+ * 4 5 6 7 0 0 0
+ * 5 6 7 8 0 0 0
+ * 0 0 0 0 0 0 0
+ */
+
+ /*
+ * Close and release resources.
+ */
+ H5Dclose_wrap (dataset);
+ H5Sclose_wrap (dataspace);
+ H5Sclose_wrap (memspace);
+ H5Fclose_wrap (file);
+ }
+
+
+ // Help function for creating a new file
+ public static int H5Fcreate_wrap (String name, int flags,
+ int create_id, int access_id)
+ {
+ int file_id = -1; // file identifier
+ try
+ {
+ // Create a new file using default file properties.
+ file_id = H5.H5Fcreate (name, flags, create_id, access_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("HyperSlab.H5Fcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("HyperSlab.H5Fcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return file_id;
+ }
+
+
+ // Help function for opening an existing file
+ public static int H5Fopen_wrap (String name, int flags, int access_id)
+ {
+ int file_id = -1; // file identifier
+ try
+ {
+ // Open an existing file.
+ file_id = H5.H5Fopen (name, flags, access_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("HyperSlab.H5Fopen_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("HyperSlab.H5Fopen_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return file_id;
+ }
+
+
+ // Help function for opening an existing dataset
+ public static int H5Dopen_wrap (int loc_id, String name)
+ {
+ int dataset_id = -1; // dataset identifier
+
+ try
+ {
+ // Opening an existing dataset
+ dataset_id = H5.H5Dopen (loc_id, name);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("HyperSlab.H5Dopen_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("HyperSlab.H5Dopen_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataset_id;
+ }
+
+
+ // Help function for creating a new simple dataspace and opening it
+ // for access
+ public static int H5Screate_simple_wrap (int rank, long dims[],
+ long maxdims[])
+ {
+ int dataspace_id = -1; // dataspace identifier
+
+ try
+ {
+ // Create the data space for the dataset.
+ dataspace_id = H5.H5Screate_simple (rank, dims, maxdims);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("HyperSlab.H5Screate_simple_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("HyperSlab.H5Screate_simple_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataspace_id;
+ }
+
+
+ // Help function for getting an identifier for a copy of
+ // the dataspace for a dataset
+ public static int H5Dget_space_wrap (int dataset_id)
+ {
+ int dataspace_id = -1;
+
+ try
+ {
+ // Returning an identifier for a copy of the dataspace for a dataset
+ dataspace_id = H5.H5Dget_space (dataset_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("HyperSlab.H5Dget_space_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("HyperSlab.H5Dget_space_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataspace_id;
+ }
+
+
+ // Help function for determining the dimensionality (or rank) of
+ // a dataspace
+ public static int H5Sget_simple_extent_ndims_wrap (int space_id)
+ {
+ int rank = -1;
+
+ try
+ {
+ // Determine the dimensionality (or rank) of a dataspace.
+ rank = H5.H5Sget_simple_extent_ndims (space_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("HyperSlab.H5Sget_simple_extent_ndims_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("HyperSlab.H5Sget_simple_extent_ndims_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return rank;
+ }
+
+
+ // Help function for returning the size and maximum sizes of each
+ // dimension of a dataspace through the dims and maxdims parameters.
+ public static int H5Sget_simple_extent_dims_wrap (int space_id,
+ long dims[],
+ long maxdims[])
+ {
+ int dimension_number = -1;
+
+ try
+ {
+ dimension_number = H5.H5Sget_simple_extent_dims (space_id, dims,
+ maxdims);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("HyperSlab.H5Sget_simple_extent_dims_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("HyperSlab.H5Sget_simple_extent_dims_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dimension_number;
+ }
+
+
+ // Help function for selecting a hyperslab region to add to the
+ // current selected region for the dataspace specified by space_id.
+ public static int H5Sselect_hyperslab_wrap (int space_id, int op,
+ long start[], long stride[],
+ long count[], long block[])
+ {
+ int status = -1;
+
+ try
+ {
+ status = H5.H5Sselect_hyperslab (space_id, op, start, stride,
+ count, block);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("HyperSlab.H5Sselect_hyperslab_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("HyperSlab.H5Sselect_hyperslab_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for creating a dataset
+ public static int H5Dcreate_wrap (int loc_id, String name, int type_id,
+ int space_id, int create_plist_id)
+ {
+ int dataset_id = -1; // dataset identifier
+
+ try
+ {
+ // Create the dataset
+ dataset_id = H5.H5Dcreate (loc_id, name, type_id, space_id,
+ create_plist_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("HyperSlab.H5Dcreate_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("HyperSlab.H5Dcreate_wrap() with other Exception: "
+ + e.getMessage());
+ }
+ return dataset_id;
+ }
+
+
+ // Help function for writing the dataset
+ public static int H5Dwrite_wrap (int dataset_id, int mem_type_id,
+ int mem_space_id, int file_space_id,
+ int xfer_plist_id, Object buf)
+ {
+ int status = -1;
+
+ try
+ {
+ // Write the dataset.
+ status = H5.H5Dwrite (dataset_id, mem_type_id, mem_space_id,
+ file_space_id, xfer_plist_id, buf);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("HyperSlab.H5Dwrite_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("HyperSlab.H5Dwrite_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for reading the dataset
+ public static int H5Dread_wrap (int dataset_id, int mem_type_id,
+ int mem_space_id, int file_space_id,
+ int xfer_plist_id, Object obj)
+ {
+ int status = -1;
+
+ try
+ {
+ // Read the dataset.
+ status = H5.H5Dread (dataset_id, mem_type_id, mem_space_id,
+ file_space_id, xfer_plist_id, obj);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("HyperSlab.H5Dread_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("HyperSlab.H5Dread_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for terminating access to the data space.
+ public static int H5Sclose_wrap (int dataspace_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the data space.
+ status = H5.H5Sclose (dataspace_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("HyperSlab.H5Sclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("HyperSlab.H5Sclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for ending access to the dataset and releasing
+ // resources used by it.
+ public static int H5Dclose_wrap (int dataset_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // End access to the dataset and release resources used by it.
+ status = H5.H5Dclose (dataset_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("HyperSlab.H5Dclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("HyperSlab.H5Dclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+
+
+ // Help function for terminating access to the file.
+ public static int H5Fclose_wrap (int file_id)
+ {
+ int status = -1;
+
+ try
+ {
+ // Terminate access to the file.
+ status = H5.H5Fclose (file_id);
+ }
+ catch (HDF5Exception hdf5e)
+ {
+ System.out.println
+ ("HyperSlab.H5Fclose_wrap() with HDF5Exception: "
+ + hdf5e.getMessage());
+ }
+ catch (Exception e)
+ {
+ System.out.println
+ ("HyperSlab.H5Fclose_wrap() with other exception: "
+ + e.getMessage());
+ }
+ return status;
+ }
+}
diff --git a/doc/html/Tutor/examples/java/Makefile b/doc/html/Tutor/examples/java/Makefile
new file mode 100644
index 0000000..a70ab0b
--- /dev/null
+++ b/doc/html/Tutor/examples/java/Makefile
@@ -0,0 +1,92 @@
+# Generated automatically from Makefile.in by configure.
+# /*=======================================================================
+# UNIVERSITY OF ILLINOIS (UI), NATIONAL CENTER FOR SUPERCOMPUTING
+# APPLICATIONS (NCSA), Software Distribution Policy for Public Domain
+# Software
+#
+# NCSA HDF Version 5 source code and documentation are in the public
+# domain, available without fee for education, research, non-commercial and
+# commercial purposes. Users may distribute the binary or source code to
+# third parties provided that this statement appears on all copies and that
+# no charge is made for such copies.
+#
+# UI MAKES NO REPRESENTATIONS ABOUT THE SUITABILITY OF THE SOFTWARE FOR ANY
+# PURPOSE. IT IS PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY. THE
+# UI SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY THE USER OF THIS
+# SOFTWARE. The software may have been developed under agreements between
+# the UI and the Federal Government which entitle the Government to certain
+# rights.
+#
+# We ask, but do not require that the following message be included in all
+# derived works:
+#
+# Portions developed at the National Center for Supercomputing Applications
+# at the University of Illinois at Urbana-Champaign.
+#
+# By copying this program, you, the user, agree to abide by the conditions
+# and understandings with respect to any software which is marked with a
+# public domain notice.
+#
+# =======================================================================*/
+#
+
+
+JAVAC = /usr/java1.2/bin/javac
+FIND = /bin/find
+
+CLASSPATH=/usr/java1.2/jre/lib/rt.jar:/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5
+
+
+.SUFFIXES: .java .class
+
+.java.class:
+ $(JAVAC) -classpath $(CLASSPATH) $<
+
+tutorial: ./Compound.class \
+ ./Copy.class \
+ ./CreateAttribute.class \
+ ./CreateDataset.class \
+ ./CreateFile.class \
+ ./CreateFileInput.class \
+ ./CreateGroup.class \
+ ./CreateGroupAR.class \
+ ./CreateGroupDataset.class \
+ ./DatasetRdWt.class \
+ ./HyperSlab.class
+ chmod u+x *.sh
+
+clean: clean-classes
+
+distclean: clean-classes clean-data
+ rm config.cache config.status config.log
+ rm -rf ./Makefile
+
+clean-classes:
+ $(FIND) . \( -name '#*' -o -name '*~' -o -name '*.class' \) -exec rm -f {} \; ;\
+
+clean-data:
+ rm -rf *.h5
+
+Compound: ./Compound.class
+Copy: ./Copy.class
+CreateAttribute: ./CreateAttribute.class
+CreateDataset: ./CreateDataset.class
+CreateFile: ./CreateFile.class
+CreateFileInput: ./CreateFileInput.class
+CreateGroup: ./CreateGroup.class
+CreateGroupAR: ./CreateGroupAR.class
+CreateGroupDataset: ./CreateGroupDataset.class
+DatasetRdWt: ./DatasetRdWt.class
+HyperSlab: ./HyperSlab.class
+
+CLASSES= ./Compound.class \
+ ./Copy.class \
+ ./CreateAttribute.class \
+ ./CreateDataset.class \
+ ./CreateFileInput.class \
+ ./CreateFile.class \
+ ./CreateGroup.class \
+ ./CreateGroupAR.class \
+ ./CreateGroupDataset.class \
+ ./DatasetRdWt.class \
+ ./HyperSlab.class
diff --git a/doc/html/Tutor/examples/java/README b/doc/html/Tutor/examples/java/README
new file mode 100644
index 0000000..95c9360
--- /dev/null
+++ b/doc/html/Tutor/examples/java/README
@@ -0,0 +1,21 @@
+These files are Java versions of the example programs used in
+the HDF-5 tutorial:
+ http://hdf.ncsa.uiuc.edu/training/hdf5/
+
+The examples here correspond to the examples explained in the first 13
+sections of the tutorial.
+
+Lesson C program Java program Topic
+
+4 h5_crtfile.c CreateFile.java Create an HDF-5 file.
+5 h5_crtdat.c CreateDataset.java Create a dataset.
+6 h5_rdwt.c DatasetRdWt.java Write/Read a dataset.
+7 h5_crtatt.c CreateAttribute.java Create an attribute.
+8 h5_crtgrp.c CreateGroup.java Create a group.
+9 h5_crtgrpar.c CreateGroupAR.java Abs. and Rel. paths.
+10 h5_crtgrpd.c CreateGroupDataset.java Create dataset in grp.
+
+11 h5_compound.c Compound.java Compound datatype
+12 h5_hyperslab.c HyperSlab.java Selection of hyperslab
+13 h5_copy.c Copy.java Selection of elements
+
diff --git a/doc/html/Tutor/examples/java/readme.html b/doc/html/Tutor/examples/java/readme.html
new file mode 100644
index 0000000..ac96004
--- /dev/null
+++ b/doc/html/Tutor/examples/java/readme.html
@@ -0,0 +1,192 @@
+<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
+ <meta name="GENERATOR" content="Mozilla/4.61 [en] (WinNT; I) [Netscape]">
+ <title>readme</title>
+</head>
+<body text="#000000" bgcolor="#FFFFFF" link="#0000EE" vlink="#551A8B" alink="#FF0000">
+
+<h3>
+<b>HDF 5 Tutorial Examples in Java</b></h3>
+
+<p><br>These files are Java versions of the example programs used in the
+HDF-5 tutorial:
+<br>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; <a href="http://hdf.ncsa.uiuc.edu/training/hdf5/">http://hdf.ncsa.uiuc.edu/training/hdf5/</a>
+<p>The examples here correspond to the examples explained in the first
+13 sections of the tutorial.
+<br>&nbsp;
+<br>&nbsp;
+<table BORDER CELLPADDING=2 WIDTH="100%" >
+<tr>
+<td>
+<center><b>Lesson</b></center>
+</td>
+
+<td>
+<center><b>Topic</b></center>
+</td>
+
+<td>
+<center><b>C file</b></center>
+</td>
+
+<td>
+<center><b>Java file</b></center>
+</td>
+</tr>
+
+<tr>
+<td><a href="http://hdf.ncsa.uiuc.edu/training/hdf5/crtfile.html">Lesson
+4</a></td>
+
+<td>Create an HDF-5 file.</td>
+
+<td>h5_crtfile.c</td>
+
+<td>CreateFile.java</td>
+</tr>
+
+<tr>
+<td><a href="http://hdf.ncsa.uiuc.edu/training/hdf5/crtdat.html">Lesson
+5</a></td>
+
+<td>Create a Dataset in an HDF-5 file</td>
+
+<td>h5_crtdat.c</td>
+
+<td>CreateDataset.java</td>
+</tr>
+
+<tr>
+<td><a href="http://hdf.ncsa.uiuc.edu/training/hdf5/rdwt.html">Lesson 6</a></td>
+
+<td>Write and Read data in a dataset</td>
+
+<td>h5_rdwt.c</td>
+
+<td>DatasetRdWt.java</td>
+</tr>
+
+<tr>
+<td><a href="http://hdf.ncsa.uiuc.edu/training/hdf5/crtatt.html">Lesson
+7</a></td>
+
+<td>Create an attribute.</td>
+
+<td>h5_crtatt.c</td>
+
+<td>CreateAttribute.java</td>
+</tr>
+
+<tr>
+<td><a href="http://hdf.ncsa.uiuc.edu/training/hdf5/crtgrp.html">Lesson
+8</a></td>
+
+<td>Create a group.</td>
+
+<td>h5_crtgrp.c</td>
+
+<td>CreateGroup.java</td>
+</tr>
+
+<tr>
+<td><a href="http://hdf.ncsa.uiuc.edu/training/hdf5/crtgrpar.html">Lesson
+9</a></td>
+
+<td>Using Absolute and relative paths</td>
+
+<td>h5_crtgrpar.c</td>
+
+<td>CreateGroupAR.java</td>
+</tr>
+
+<tr>
+<td><a href="http://hdf.ncsa.uiuc.edu/training/hdf5/crtgrpd.html">Lesson
+10</a></td>
+
+<td>Create a dataset in a group.</td>
+
+<td>h5_crtgrpd.c</td>
+
+<td>CreateGroupDataset.java</td>
+</tr>
+
+<tr>
+<td><a href="http://hdf.ncsa.uiuc.edu/training/hdf5/compound.html">Lesson
+11</a></td>
+
+<td>Using Compound Datatypes</td>
+
+<td>h5_compound.c</td>
+
+<td>Compound.java</td>
+</tr>
+
+<tr>
+<td><a href="http://hdf.ncsa.uiuc.edu/training/hdf5/select.html">Lesson
+12</a></td>
+
+<td>Selection of a hyperslab.</td>
+
+<td>h5_hyperslab.c</td>
+
+<td>HyperSlab.java</td>
+</tr>
+
+<tr>
+<td><a href="http://hdf.ncsa.uiuc.edu/training/hdf5/selectc.html">Lesson
+13</a></td>
+
+<td>Selection of elements.</td>
+
+<td>h5_copy.c</td>
+
+<td>Copy.java</td>
+</tr>
+</table>
+
+<p>
+<hr><b>Some Explanation About Tutorial Examples</b>
+<p>The Java tutorial programs try to stay close to the corresponding C
+programs. The main function's structure is almost the same as in the C program, with one
+call for each HDF5 library function. For example, where the C program has
+a call to <b>H5Fopen()</b>, the Java program has a call to <b>H5Fopen_wrap()</b>.
+<p>The wrapper functions call the HDF-5 library using the Java HDF-5 Interface
+(JHI5). The HDF-5 C interface returns error codes; these are represented
+by Java Exceptions in the JHI5. The wrapper function catches the exception
+and prints a message.
+<p>For example, the <b>H5Fopen_wrap() </b>method calls the JHI5, and catches
+any exceptions which may occur:
+<pre>&nbsp;&nbsp; <b>public static int H5Fopen_wrap (String name, int flags, int access_id)
+&nbsp;&nbsp; {
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; int file_id = -1;&nbsp;&nbsp;&nbsp; // file identifier&nbsp;
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; try&nbsp;
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; {
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; // Create a new file using default file properties.
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; file_id = H5.H5Fopen (name, flags, access_id);
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; }
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; catch (HDF5Exception hdf5e)
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; {
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; System.out.println&nbsp;
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; ("DatasetRdWt.H5Fopen_wrap() with HDF5Exception: "
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; + hdf5e.getMessage());
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; }
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; catch (Exception e)
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; {
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; System.out.println&nbsp;
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; ("DatasetRdWt.H5Fopen_wrap() with other Exception: "&nbsp;
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; + e.getMessage());
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; }
+&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; return file_id;
+&nbsp;&nbsp; }</b></pre>
+
+<p><br>
+<hr noshade size=1><a href="http://www.ncsa.uiuc.edu/"><img SRC="http://www.ncsa.uiuc.edu/Images/NCSAhome/footerlogo.gif" ALT="NCSA" BORDER=0 ></a>
+<br><font face="arial,helvetica"><font size=-1><a href="http://www.ncsa.uiuc.edu/">The
+National Center for Supercomputing Applications</a></font></font>
+<br><font face="arial,helvetica"><font size=-1><a href="http://www.uiuc.edu/">University
+of Illinois at Urbana-Champaign</a></font></font>
+<p><font face="arial,helvetica"><font size=-1><a href="mailto:hdfhelp@ncsa.uiuc.edu">hdfhelp@ncsa.uiuc.edu</a></font></font>
+</body>
+</html>
diff --git a/doc/html/Tutor/examples/java/runCompound.sh b/doc/html/Tutor/examples/java/runCompound.sh
new file mode 100644
index 0000000..ef2be38
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCompound.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5
+HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib
+
+#make this relative to the source root...
+PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+/usr/java1.2/bin/java Compound $*
diff --git a/doc/html/Tutor/examples/java/runCompound.sh.in b/doc/html/Tutor/examples/java/runCompound.sh.in
new file mode 100644
index 0000000..bc58088
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCompound.sh.in
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=@JH5INST@
+HDF5LIB=@HDF5LIB@
+
+#make this relative to the source root...
+PWD=@PWD@
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+@JAVA@ Compound $*
diff --git a/doc/html/Tutor/examples/java/runCopy.sh b/doc/html/Tutor/examples/java/runCopy.sh
new file mode 100644
index 0000000..de71783
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCopy.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5
+HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib
+
+#make this relative to the source root...
+PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+/usr/java1.2/bin/java Copy $*
diff --git a/doc/html/Tutor/examples/java/runCopy.sh.in b/doc/html/Tutor/examples/java/runCopy.sh.in
new file mode 100644
index 0000000..2fd8a46
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCopy.sh.in
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=@JH5INST@
+HDF5LIB=@HDF5LIB@
+
+#make this relative to the source root...
+PWD=@PWD@
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+@JAVA@ Copy $*
diff --git a/doc/html/Tutor/examples/java/runCreateAttribute.sh b/doc/html/Tutor/examples/java/runCreateAttribute.sh
new file mode 100644
index 0000000..419abce
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCreateAttribute.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5
+HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib
+
+#make this relative to the source root...
+PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+/usr/java1.2/bin/java CreateAttribute $*
diff --git a/doc/html/Tutor/examples/java/runCreateAttribute.sh.in b/doc/html/Tutor/examples/java/runCreateAttribute.sh.in
new file mode 100644
index 0000000..83bcdc7
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCreateAttribute.sh.in
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=@JH5INST@
+HDF5LIB=@HDF5LIB@
+
+#make this relative to the source root...
+PWD=@PWD@
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+@JAVA@ CreateAttribute $*
diff --git a/doc/html/Tutor/examples/java/runCreateDataset.sh b/doc/html/Tutor/examples/java/runCreateDataset.sh
new file mode 100644
index 0000000..371e811
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCreateDataset.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5
+HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib
+
+#make this relative to the source root...
+PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+/usr/java1.2/bin/java CreateDataset $*
diff --git a/doc/html/Tutor/examples/java/runCreateDataset.sh.in b/doc/html/Tutor/examples/java/runCreateDataset.sh.in
new file mode 100644
index 0000000..606e153
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCreateDataset.sh.in
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=@JH5INST@
+HDF5LIB=@HDF5LIB@
+
+#make this relative to the source root...
+PWD=@PWD@
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+@JAVA@ CreateDataset $*
diff --git a/doc/html/Tutor/examples/java/runCreateFile.sh b/doc/html/Tutor/examples/java/runCreateFile.sh
new file mode 100644
index 0000000..e32c0ab
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCreateFile.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5
+HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib
+
+#make this relative to the source root...
+PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+/usr/java1.2/bin/java CreateFile $*
diff --git a/doc/html/Tutor/examples/java/runCreateFile.sh.in b/doc/html/Tutor/examples/java/runCreateFile.sh.in
new file mode 100644
index 0000000..bf48b9c
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCreateFile.sh.in
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=@JH5INST@
+HDF5LIB=@HDF5LIB@
+
+#make this relative to the source root...
+PWD=@PWD@
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+@JAVA@ CreateFile $*
diff --git a/doc/html/Tutor/examples/java/runCreateFileInput.sh b/doc/html/Tutor/examples/java/runCreateFileInput.sh
new file mode 100644
index 0000000..fa12f06
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCreateFileInput.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5
+HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib
+
+#make this relative to the source root...
+PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+/usr/java1.2/bin/java CreateFileInput $*
diff --git a/doc/html/Tutor/examples/java/runCreateFileInput.sh.in b/doc/html/Tutor/examples/java/runCreateFileInput.sh.in
new file mode 100644
index 0000000..776eac5
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCreateFileInput.sh.in
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=@JH5INST@
+HDF5LIB=@HDF5LIB@
+
+#make this relative to the source root...
+PWD=@PWD@
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+@JAVA@ CreateFileInput $*
diff --git a/doc/html/Tutor/examples/java/runCreateGroup.sh b/doc/html/Tutor/examples/java/runCreateGroup.sh
new file mode 100644
index 0000000..ee9deee
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCreateGroup.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5
+HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib
+
+#make this relative to the source root...
+PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+/usr/java1.2/bin/java CreateGroup $*
diff --git a/doc/html/Tutor/examples/java/runCreateGroup.sh.in b/doc/html/Tutor/examples/java/runCreateGroup.sh.in
new file mode 100644
index 0000000..e2eadb5
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCreateGroup.sh.in
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=@JH5INST@
+HDF5LIB=@HDF5LIB@
+
+#make this relative to the source root...
+PWD=@PWD@
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+@JAVA@ CreateGroup $*
diff --git a/doc/html/Tutor/examples/java/runCreateGroupAR.sh b/doc/html/Tutor/examples/java/runCreateGroupAR.sh
new file mode 100644
index 0000000..2619a11
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCreateGroupAR.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5
+HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib
+
+#make this relative to the source root...
+PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+/usr/java1.2/bin/java CreateGroupAR $*
diff --git a/doc/html/Tutor/examples/java/runCreateGroupAR.sh.in b/doc/html/Tutor/examples/java/runCreateGroupAR.sh.in
new file mode 100644
index 0000000..d61d852
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCreateGroupAR.sh.in
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=@JH5INST@
+HDF5LIB=@HDF5LIB@
+
+#make this relative to the source root...
+PWD=@PWD@
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+@JAVA@ CreateGroupAR $*
diff --git a/doc/html/Tutor/examples/java/runCreateGroupDataset.sh b/doc/html/Tutor/examples/java/runCreateGroupDataset.sh
new file mode 100644
index 0000000..15b7bfa
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCreateGroupDataset.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5
+HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib
+
+#make this relative to the source root...
+PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+/usr/java1.2/bin/java CreateGroupDataset $*
diff --git a/doc/html/Tutor/examples/java/runCreateGroupDataset.sh.in b/doc/html/Tutor/examples/java/runCreateGroupDataset.sh.in
new file mode 100644
index 0000000..af2b4b5
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runCreateGroupDataset.sh.in
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=@JH5INST@
+HDF5LIB=@HDF5LIB@
+
+#make this relative to the source root...
+PWD=@PWD@
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+@JAVA@ CreateGroupDataset $*
diff --git a/doc/html/Tutor/examples/java/runDatasetRdWt.sh b/doc/html/Tutor/examples/java/runDatasetRdWt.sh
new file mode 100644
index 0000000..a049ea8
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runDatasetRdWt.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5
+HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib
+
+#make this relative to the source root...
+PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+/usr/java1.2/bin/java DatasetRdWt $*
diff --git a/doc/html/Tutor/examples/java/runDatasetRdWt.sh.in b/doc/html/Tutor/examples/java/runDatasetRdWt.sh.in
new file mode 100644
index 0000000..ad3a049
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runDatasetRdWt.sh.in
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=@JH5INST@
+HDF5LIB=@HDF5LIB@
+
+#make this relative to the source root...
+PWD=@PWD@
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+@JAVA@ DatasetRdWt $*
diff --git a/doc/html/Tutor/examples/java/runHyperSlab.sh b/doc/html/Tutor/examples/java/runHyperSlab.sh
new file mode 100644
index 0000000..549f807
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runHyperSlab.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=/afs/ncsa/projects/hdf/java/java2/mcgrath/arabica/New5
+HDF5LIB=/afs/ncsa/projects/hdf/release/prehdf5-1.2.1/SunOS_5.7/lib
+
+#make this relative to the source root...
+PWD=/afs/ncsa.uiuc.edu/projects/hdf/java/java2/mcgrath/arabica/java-hdf5
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/solaris"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+/usr/java1.2/bin/java HyperSlab $*
diff --git a/doc/html/Tutor/examples/java/runHyperSlab.sh.in b/doc/html/Tutor/examples/java/runHyperSlab.sh.in
new file mode 100644
index 0000000..f515fc9
--- /dev/null
+++ b/doc/html/Tutor/examples/java/runHyperSlab.sh.in
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+JH5INSTALLDIR=@JH5INST@
+HDF5LIB=@HDF5LIB@
+
+#make this relative to the source root...
+PWD=@PWD@
+LIBDIR=$JH5INSTALLDIR"/lib"
+
+CLASSPATH=".:"$LIBDIR"/jhdf5.jar"
+
+LD_LIBRARY_PATH=$HDF5LIB":"$LIBDIR"/@JAVATARG@"
+
+export CLASSPATH
+export LD_LIBRARY_PATH
+
+@JAVA@ HyperSlab $*
diff --git a/doc/html/Tutor/examples/mountexample.f90 b/doc/html/Tutor/examples/mountexample.f90
new file mode 100644
index 0000000..f4341b2
--- /dev/null
+++ b/doc/html/Tutor/examples/mountexample.f90
@@ -0,0 +1,187 @@
+!
+!In the following example we create one file with a group in it,
+!and another file with a dataset. Mounting is used to
+!access the dataset from the second file as a member of a group
+!in the first file.
+!
+
+ PROGRAM MOUNTEXAMPLE
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+
+ !
+ ! Filenames are "mount1.h5" and "mount2.h5"
+ !
+ CHARACTER(LEN=9), PARAMETER :: filename1 = "mount1.h5"
+ CHARACTER(LEN=9), PARAMETER :: filename2 = "mount2.h5"
+
+ !
+ !data space rank and dimensions
+ !
+ INTEGER, PARAMETER :: RANK = 2
+ INTEGER, PARAMETER :: NX = 4
+ INTEGER, PARAMETER :: NY = 5
+
+ !
+ ! File identifiers
+ !
+ INTEGER(HID_T) :: file1_id, file2_id
+
+ !
+ ! Group identifier
+ !
+ INTEGER(HID_T) :: gid
+
+ !
+ ! Dataset identifier
+ !
+ INTEGER(HID_T) :: dset_id
+
+ !
+ ! Data space identifier
+ !
+ INTEGER(HID_T) :: dataspace
+
+ !
+ ! Data type identifier
+ !
+ INTEGER(HID_T) :: dtype_id
+
+ !
+ ! The dimensions for the dataset.
+ !
+ INTEGER(HSIZE_T), DIMENSION(2) :: dims = (/NX,NY/)
+
+ !
+ ! Flag to check operation success
+ !
+ INTEGER :: error
+
+ !
+ ! General purpose integer
+ !
+ INTEGER :: i, j
+
+ !
+ ! Data buffers
+ !
+ INTEGER, DIMENSION(NX,NY) :: data_in, data_out
+
+ !
+ ! Initialize FORTRAN interface.
+ !
+ CALL h5open_f(error)
+
+ !
+ ! Initialize data_in buffer
+ !
+ do i = 1, NX
+ do j = 1, NY
+ data_in(i,j) = (i-1) + (j-1)
+ end do
+ end do
+
+ !
+ ! Create first file "mount1.h5" using default properties.
+ !
+ CALL h5fcreate_f(filename1, H5F_ACC_TRUNC_F, file1_id, error)
+
+ !
+ ! Create group "/G" inside file "mount1.h5".
+ !
+ CALL h5gcreate_f(file1_id, "/G", gid, error)
+
+ !
+ ! Close file and group identifiers.
+ !
+ CALL h5gclose_f(gid, error)
+ CALL h5fclose_f(file1_id, error)
+
+ !
+ ! Create second file "mount2.h5" using default properties.
+ !
+ CALL h5fcreate_f(filename2, H5F_ACC_TRUNC_F, file2_id, error)
+
+ !
+ ! Create data space for the dataset.
+ !
+ CALL h5screate_simple_f(RANK, dims, dataspace, error)
+
+ !
+ ! Create dataset "/D" inside file "mount2.h5".
+ !
+ CALL h5dcreate_f(file2_id, "/D", H5T_NATIVE_INTEGER, dataspace, &
+ dset_id, error)
+
+ !
+ ! Write data_in to the dataset
+ !
+ CALL h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, data_in, error)
+
+ !
+ ! Close file, dataset and dataspace identifiers.
+ !
+ CALL h5sclose_f(dataspace, error)
+ CALL h5dclose_f(dset_id, error)
+ CALL h5fclose_f(file2_id, error)
+
+ !
+ ! Reopen both files.
+ !
+ CALL h5fopen_f (filename1, H5F_ACC_RDWR_F, file1_id, error)
+ CALL h5fopen_f (filename2, H5F_ACC_RDWR_F, file2_id, error)
+
+ !
+ ! Mount the second file under the first file's "/G" group.
+ !
+ CALL h5fmount_f (file1_id, "/G", file2_id, error)
+
+
+ !
+ ! Access dataset D in the first file under the name /G/D.
+ !
+ CALL h5dopen_f(file1_id, "/G/D", dset_id, error)
+
+ !
+ ! Get dataset's data type.
+ !
+ CALL h5dget_type_f(dset_id, dtype_id, error)
+
+ !
+ ! Read the dataset.
+ !
+ CALL h5dread_f(dset_id, dtype_id, data_out, error)
+
+ !
+ ! Print out the data.
+ !
+ do i = 1, NX
+ print *, (data_out(i,j), j = 1, NY)
+ end do
+
+
+ !
+ !Close dset_id and dtype_id.
+ !
+ CALL h5dclose_f(dset_id, error)
+ CALL h5tclose_f(dtype_id, error)
+
+ !
+ ! Unmount the second file.
+ !
+ CALL h5funmount_f(file1_id, "/G", error);
+
+ !
+ ! Close both files.
+ !
+ CALL h5fclose_f(file1_id, error)
+ CALL h5fclose_f(file2_id, error)
+ !
+ ! Close FORTRAN interface.
+ !
+ CALL h5close_f(error)
+
+ END PROGRAM MOUNTEXAMPLE
+
diff --git a/doc/html/Tutor/examples/refobjexample.f90 b/doc/html/Tutor/examples/refobjexample.f90
new file mode 100644
index 0000000..fdbb26d
--- /dev/null
+++ b/doc/html/Tutor/examples/refobjexample.f90
@@ -0,0 +1,142 @@
+!
+! This program shows how to create and store references to objects.
+! The program creates a file, two groups, a dataset to store integer data, and
+! a dataset to store references to those objects.
+! The stored references are then used to open the objects they point to.
+! Data is written to the dereferenced dataset, and the datatype class is
+! displayed for the shared datatype.
+!
+ PROGRAM OBJ_REFERENCES
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+ CHARACTER(LEN=10), PARAMETER :: filename = "FORTRAN.h5" ! File
+ CHARACTER(LEN=8), PARAMETER :: dsetnamei = "INTEGERS" ! Dataset with the integer data
+ CHARACTER(LEN=17), PARAMETER :: dsetnamer = "OBJECT_REFERENCES" ! Dataset with object
+ ! references
+ CHARACTER(LEN=6), PARAMETER :: groupname1 = "GROUP1" ! Groups in the file
+ CHARACTER(LEN=6), PARAMETER :: groupname2 = "GROUP2" !
+
+ INTEGER(HID_T) :: file_id ! File identifier
+ INTEGER(HID_T) :: grp1_id ! Group identifiers
+ INTEGER(HID_T) :: grp2_id !
+ INTEGER(HID_T) :: dset_id ! Dataset identifiers
+ INTEGER(HID_T) :: dsetr_id !
+ INTEGER(HID_T) :: type_id ! Type identifier
+ INTEGER(HID_T) :: space_id ! Dataspace identifiers
+ INTEGER(HID_T) :: spacer_id !
+ INTEGER :: error
+ INTEGER(HSIZE_T), DIMENSION(1) :: dims = (/5/)
+ INTEGER(HSIZE_T), DIMENSION(1) :: dimsr= (/4/)
+ INTEGER(HSIZE_T), DIMENSION(1) :: my_maxdims = (/5/)
+ INTEGER :: rank = 1
+ INTEGER :: rankr = 1
+ TYPE(hobj_ref_t_f), DIMENSION(4) :: ref
+ TYPE(hobj_ref_t_f), DIMENSION(4) :: ref_out
+ INTEGER, DIMENSION(5) :: data = (/1, 2, 3, 4, 5/)
+ INTEGER :: class, ref_size
+ !
+ ! Initialize FORTRAN interface.
+ !
+ CALL h5open_f(error)
+ !
+ ! Create a file
+ !
+ CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error)
+ ! Default file access and file creation
+ ! properties are used.
+ !
+ ! Create a group in the file
+ !
+ CALL h5gcreate_f(file_id, groupname1, grp1_id, error)
+ !
+ ! Create a group inside the created group
+ !
+ CALL h5gcreate_f(grp1_id, groupname2, grp2_id, error)
+ !
+ ! Create dataspaces for datasets
+ !
+ CALL h5screate_simple_f(rank, dims, space_id, error, maxdims=my_maxdims)
+ CALL h5screate_simple_f(rankr, dimsr, spacer_id, error)
+ !
+ ! Create integer dataset
+ !
+ CALL h5dcreate_f(file_id, dsetnamei, H5T_NATIVE_INTEGER, space_id, &
+ dset_id, error)
+ !
+ ! Create dataset to store references to the objects
+ !
+ CALL h5dcreate_f(file_id, dsetnamer, H5T_STD_REF_OBJ, spacer_id, &
+ dsetr_id, error)
+ !
+ ! Create a datatype and store in the file
+ !
+ CALL h5tcopy_f(H5T_NATIVE_REAL, type_id, error)
+ CALL h5tcommit_f(file_id, "MyType", type_id, error)
+ !
+ ! Close dataspaces, groups and integer dataset
+ !
+ CALL h5sclose_f(space_id, error)
+ CALL h5sclose_f(spacer_id, error)
+ CALL h5tclose_f(type_id, error)
+ CALL h5dclose_f(dset_id, error)
+ CALL h5gclose_f(grp1_id, error)
+ CALL h5gclose_f(grp2_id, error)
+ !
+ ! Create references to two groups, integer dataset and shared datatype
+ ! and write it to the dataset in the file
+ !
+ CALL h5rcreate_f(file_id, groupname1, ref(1), error)
+ CALL h5rcreate_f(file_id, "/GROUP1/GROUP2", ref(2), error)
+ CALL h5rcreate_f(file_id, dsetnamei, ref(3), error)
+ CALL h5rcreate_f(file_id, "MyType", ref(4), error)
+ ref_size = size(ref)
+ CALL h5dwrite_f(dsetr_id, H5T_STD_REF_OBJ, ref, ref_size, error)
+ !
+ ! Close the dataset
+ !
+ CALL h5dclose_f(dsetr_id, error)
+ !
+ ! Reopen the dataset with object references and read references to the buffer
+ !
+ CALL h5dopen_f(file_id, dsetnamer,dsetr_id,error)
+ ref_size = size(ref_out)
+ CALL h5dread_f(dsetr_id, H5T_STD_REF_OBJ, ref_out, ref_size, error)
+ !
+ ! Dereference the third reference. We know that it is a dataset. In practice
+ ! one should use the h5rget_object_type_f function to find out
+ ! the type of the object the reference points to.
+ !
+ CALL h5rdereference_f(dsetr_id, ref(3), dset_id, error)
+ !
+ ! Write data to the dataset.
+ !
+ CALL h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, data, error)
+ if (error .eq. 0) write(*,*) "Data has been successfully written to the dataset "
+ !
+ ! Dereference the fourth reference. We know that it is a datatype. In practice
+ ! one should use the h5rget_object_type_f function to find out
+ ! the type of the object the reference points to.
+ !
+ CALL h5rdereference_f(dsetr_id, ref(4), type_id, error)
+ !
+ ! Get datatype class and display it if it is of a FLOAT class.
+ !
+ CALL h5tget_class_f(type_id, class, error)
+ if(class .eq. H5T_FLOAT_F) write(*,*) "Stored datatype is of a FLOAT class"
+ !
+ ! Close all objects.
+ !
+ CALL h5dclose_f(dset_id, error)
+ CALL h5tclose_f(type_id, error)
+ CALL h5dclose_f(dsetr_id, error)
+ CALL h5fclose_f(file_id, error)
+ !
+ ! Close FORTRAN interface.
+ !
+ CALL h5close_f(error)
+
+ END PROGRAM OBJ_REFERENCES
+
+
diff --git a/doc/html/Tutor/examples/refregexample.f90 b/doc/html/Tutor/examples/refregexample.f90
new file mode 100644
index 0000000..05fcf3f
--- /dev/null
+++ b/doc/html/Tutor/examples/refregexample.f90
@@ -0,0 +1,162 @@
+!
+! This program shows how to create, store, and dereference references
+! to dataset regions.
+! The program creates a file and writes a two-dimensional integer dataset
+! to it. It then creates references to a hyperslab and to 3 selected points
+! in the integer dataset, and stores these references in a second dataset.
+! The program reopens the second dataset, reads and dereferences the region
+! references, and then reads and displays the selected data from the
+! integer dataset.
+!
+ PROGRAM REG_REFERENCE
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+ CHARACTER(LEN=10), PARAMETER :: filename = "FORTRAN.h5"
+ CHARACTER(LEN=6), PARAMETER :: dsetnamev = "MATRIX"
+ CHARACTER(LEN=17), PARAMETER :: dsetnamer = "REGION_REFERENCES"
+
+ INTEGER(HID_T) :: file_id ! File identifier
+ INTEGER(HID_T) :: space_id ! Dataspace identifier
+ INTEGER(HID_T) :: spacer_id ! Dataspace identifier
+ INTEGER(HID_T) :: dsetv_id ! Dataset identifier
+ INTEGER(HID_T) :: dsetr_id ! Dataset identifier
+ INTEGER :: error
+ TYPE(hdset_reg_ref_t_f) , DIMENSION(2) :: ref ! Buffers to store references
+ TYPE(hdset_reg_ref_t_f) , DIMENSION(2) :: ref_out !
+ INTEGER(HSIZE_T), DIMENSION(2) :: dims = (/2,9/) ! Datasets dimensions
+ INTEGER(HSIZE_T), DIMENSION(1) :: dimsr = (/2/) !
+ INTEGER(HSSIZE_T), DIMENSION(2) :: start
+ INTEGER(HSIZE_T), DIMENSION(2) :: count
+ INTEGER :: rankr = 1
+ INTEGER :: rank = 2
+ INTEGER , DIMENSION(2,9) :: data
+ INTEGER , DIMENSION(2,9) :: data_out = 0
+ INTEGER(HSSIZE_T) , DIMENSION(2,3) :: coord
+ INTEGER(SIZE_T) ::num_points = 3 ! Number of selected points
+ INTEGER :: i, j
+ INTEGER :: ref_size
+ coord = reshape((/1,1,2,7,1,9/), (/2,3/)) ! Coordinates of selected points
+ data = reshape ((/1,1,1,2,2,2,3,3,3,4,4,4,5,5,5,6,6,6/), (/2,9/))
+ !
+ ! Initialize FORTRAN interface.
+ !
+ CALL h5open_f(error)
+ !
+ ! Create a new file.
+ !
+ CALL h5fcreate_f(filename, H5F_ACC_TRUNC_F, file_id, error)
+ ! Default file access and file creation
+ ! properties are used.
+ !
+ ! Create dataspaces:
+ !
+ ! for dataset with references to dataset regions
+ !
+ CALL h5screate_simple_f(rankr, dimsr, spacer_id, error)
+ !
+ ! for integer dataset
+ !
+ CALL h5screate_simple_f(rank, dims, space_id, error)
+ !
+ ! Create and write datasets:
+ !
+ ! Integer dataset
+ !
+ CALL h5dcreate_f(file_id, dsetnamev, H5T_NATIVE_INTEGER, space_id, &
+ dsetv_id, error)
+ CALL h5dwrite_f(dsetv_id, H5T_NATIVE_INTEGER, data, error)
+ CALL h5dclose_f(dsetv_id, error)
+ !
+ ! Dataset with references
+ !
+ CALL h5dcreate_f(file_id, dsetnamer, H5T_STD_REF_DSETREG, spacer_id, &
+ dsetr_id, error)
+ !
+ ! Create a reference to the hyperslab selection.
+ !
+ start(1) = 0
+ start(2) = 3
+ count(1) = 2
+ count(2) = 3
+ CALL h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, &
+ start, count, error)
+ CALL h5rcreate_f(file_id, dsetnamev, space_id, ref(1), error)
+ !
+ ! Create a reference to elements selection.
+ !
+ CALL h5sselect_none_f(space_id, error)
+ CALL h5sselect_elements_f(space_id, H5S_SELECT_SET_F, rank, num_points,&
+ coord, error)
+ CALL h5rcreate_f(file_id, dsetnamev, space_id, ref(2), error)
+ !
+ ! Write dataset with the references.
+ !
+ ref_size = size(ref)
+ CALL h5dwrite_f(dsetr_id, H5T_STD_REF_DSETREG, ref, ref_size, error)
+ !
+ ! Close all objects.
+ !
+ CALL h5sclose_f(space_id, error)
+ CALL h5sclose_f(spacer_id, error)
+ CALL h5dclose_f(dsetr_id, error)
+ CALL h5fclose_f(file_id, error)
+ !
+ ! Reopen the file to test selections.
+ !
+ CALL h5fopen_f (filename, H5F_ACC_RDWR_F, file_id, error)
+ CALL h5dopen_f(file_id, dsetnamer, dsetr_id, error)
+ !
+ ! Read references to the dataset regions.
+ !
+ ref_size = size(ref_out)
+ CALL h5dread_f(dsetr_id, H5T_STD_REF_DSETREG, ref_out, ref_size, error)
+ !
+ ! Dereference the first reference.
+ !
+ CALL H5rdereference_f(dsetr_id, ref_out(1), dsetv_id, error)
+ CALL H5rget_region_f(dsetr_id, ref_out(1), space_id, error)
+ !
+ ! Read selected data from the dataset.
+ !
+ CALL h5dread_f(dsetv_id, H5T_NATIVE_INTEGER, data_out, error, &
+ mem_space_id = space_id, file_space_id = space_id)
+ write(*,*) "Hyperslab selection"
+ write(*,*)
+ do i = 1,2
+ write(*,*) (data_out (i,j), j = 1,9)
+ enddo
+ write(*,*)
+ CALL h5sclose_f(space_id, error)
+ CALL h5dclose_f(dsetv_id, error)
+ data_out = 0
+ !
+ ! Dereference the second reference.
+ !
+ CALL H5rdereference_f(dsetr_id, ref_out(2), dsetv_id, error)
+ CALL H5rget_region_f(dsetr_id, ref_out(2), space_id, error)
+ !
+ ! Read selected data from the dataset.
+ !
+ CALL h5dread_f(dsetv_id, H5T_NATIVE_INTEGER, data_out, error, &
+ mem_space_id = space_id, file_space_id = space_id)
+ write(*,*) "Point selection"
+ write(*,*)
+ do i = 1,2
+ write(*,*) (data_out (i,j), j = 1,9)
+ enddo
+ !
+ ! Close all objects
+ !
+ CALL h5sclose_f(space_id, error)
+ CALL h5dclose_f(dsetv_id, error)
+ CALL h5dclose_f(dsetr_id, error)
+ !
+ ! Close FORTRAN interface.
+ !
+ CALL h5close_f(error)
+
+ END PROGRAM REG_REFERENCE
+
+
diff --git a/doc/html/Tutor/examples/rwdsetexample.f90 b/doc/html/Tutor/examples/rwdsetexample.f90
new file mode 100644
index 0000000..729e84d
--- /dev/null
+++ b/doc/html/Tutor/examples/rwdsetexample.f90
@@ -0,0 +1,78 @@
+!
+! The following example shows how to write to and read from an existing dataset.
+! It opens the file created in the previous example, obtains the dataset
+! identifier, writes the data to the dataset in the file,
+! and then reads the dataset back into memory.
+!
+
+
+ PROGRAM RWDSETEXAMPLE
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+
+ CHARACTER(LEN=8), PARAMETER :: filename = "dsetf.h5" ! File name
+ CHARACTER(LEN=4), PARAMETER :: dsetname = "dset" ! Dataset name
+
+ INTEGER(HID_T) :: file_id ! File identifier
+ INTEGER(HID_T) :: dset_id ! Dataset identifier
+
+ INTEGER :: error ! Error flag
+ INTEGER :: i, j
+
+ INTEGER, DIMENSION(4,6) :: dset_data, data_out ! Data buffers
+
+ !
+ ! Initialize the dset_data array.
+ !
+ do i = 1, 4
+ do j = 1, 6
+ dset_data(i,j) = (i-1)*6 + j;
+ end do
+ end do
+
+ !
+ ! Initialize FORTRAN predefined datatypes
+ !
+ CALL h5open_f(error)
+
+ !
+ ! Open an existing file.
+ !
+ CALL h5fopen_f (filename, H5F_ACC_RDWR_F, file_id, error)
+
+ !
+ ! Open an existing dataset.
+ !
+ CALL h5dopen_f(file_id, dsetname, dset_id, error)
+
+ !
+ ! Write the dataset.
+ !
+ CALL h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, dset_data, error)
+
+ !
+ ! Read the dataset.
+ !
+ CALL h5dread_f(dset_id, H5T_NATIVE_INTEGER, data_out, error)
+
+ !
+ ! Close the dataset.
+ !
+ CALL h5dclose_f(dset_id, error)
+
+ !
+ ! Close the file.
+ !
+ CALL h5fclose_f(file_id, error)
+
+ !
+ ! Close FORTRAN predefined datatypes.
+ !
+ CALL h5close_f(error)
+
+ END PROGRAM RWDSETEXAMPLE
+
+
+
diff --git a/doc/html/Tutor/examples/selectele.f90 b/doc/html/Tutor/examples/selectele.f90
new file mode 100644
index 0000000..c75958c
--- /dev/null
+++ b/doc/html/Tutor/examples/selectele.f90
@@ -0,0 +1,282 @@
+!
+! This program creates two files, copy1.h5 and copy2.h5.
+! In copy1.h5, it creates a 3x4 dataset called 'Copy1'
+! and writes 0's to this dataset.
+! In copy2.h5, it creates a 3x4 dataset called 'Copy2'
+! and writes 1's to this dataset.
+! It closes both files, reopens both files, selects two
+! points in copy1.h5 and writes values to them. Then it
+! uses H5Scopy to write the same selection to copy2.h5.
+! The program then reopens the files, and reads and prints the contents of
+! the two datasets.
+!
+
+ PROGRAM SELECTEXAMPLE
+
+ USE HDF5 ! This module contains all necessary modules
+
+ IMPLICIT NONE
+
+ CHARACTER(LEN=8), PARAMETER :: filename1 = "copy1.h5" ! File name
+ CHARACTER(LEN=8), PARAMETER :: filename2 = "copy2.h5" !
+ CHARACTER(LEN=5), PARAMETER :: dsetname1 = "Copy1" ! Dataset name
+ CHARACTER(LEN=5), PARAMETER :: dsetname2 = "Copy2" !
+
+ INTEGER, PARAMETER :: RANK = 2 ! Dataset rank
+
+ INTEGER(SIZE_T), PARAMETER :: NUMP = 2 ! Number of points selected
+
+ INTEGER(HID_T) :: file1_id ! File1 identifier
+ INTEGER(HID_T) :: file2_id ! File2 identifier
+ INTEGER(HID_T) :: dset1_id ! Dataset1 identifier
+ INTEGER(HID_T) :: dset2_id ! Dataset2 identifier
+ INTEGER(HID_T) :: dataspace1 ! Dataspace identifier
+ INTEGER(HID_T) :: dataspace2 ! Dataspace identifier
+ INTEGER(HID_T) :: memspace ! memspace identifier
+
+ INTEGER(HSIZE_T), DIMENSION(1) :: dimsm = (/2/)
+ ! Memory dataspace dimensions
+ INTEGER(HSIZE_T), DIMENSION(2) :: dimsf = (/3,4/)
+ ! File dataspace dimensions
+ INTEGER(HSSIZE_T), DIMENSION(RANK,NUMP) :: coord ! Elements coordinates
+ ! in the file
+
+ INTEGER, DIMENSION(3,4) :: buf1, buf2, bufnew ! Data buffers
+ INTEGER, DIMENSION(2) :: val = (/53, 59/) ! Values to write
+
+ INTEGER :: memrank = 1 ! Rank of the dataset in memory
+
+ INTEGER :: i, j
+
+ INTEGER :: error ! Error flag
+ LOGICAL :: status
+
+
+ !
+ ! Create two files containing identical datasets. Write 0's to one
+ ! and 1's to the other.
+ !
+
+ !
+ ! Data initialization.
+ !
+ do i = 1, 3
+ do j = 1, 4
+ buf1(i,j) = 0;
+ end do
+ end do
+
+ do i = 1, 3
+ do j = 1, 4
+ buf2(i,j) = 1;
+ end do
+ end do
+
+ !
+ ! Initialize FORTRAN predefined datatypes
+ !
+ CALL h5open_f(error)
+
+ !
+ ! Create file1, file2 using default properties.
+ !
+ CALL h5fcreate_f(filename1, H5F_ACC_TRUNC_F, file1_id, error)
+
+ CALL h5fcreate_f(filename2, H5F_ACC_TRUNC_F, file2_id, error)
+
+ !
+ ! Create the data space for the datasets.
+ !
+ CALL h5screate_simple_f(RANK, dimsf, dataspace1, error)
+
+ CALL h5screate_simple_f(RANK, dimsf, dataspace2, error)
+
+ !
+ ! Create the datasets with default properties.
+ !
+ CALL h5dcreate_f(file1_id, dsetname1, H5T_NATIVE_INTEGER, dataspace1, &
+ dset1_id, error)
+
+ CALL h5dcreate_f(file2_id, dsetname2, H5T_NATIVE_INTEGER, dataspace2, &
+ dset2_id, error)
+
+ !
+ ! Write the datasets.
+ !
+ CALL h5dwrite_f(dset1_id, H5T_NATIVE_INTEGER, buf1, error)
+
+ CALL h5dwrite_f(dset2_id, H5T_NATIVE_INTEGER, buf2, error)
+
+ !
+ ! Close the dataspace for the datasets.
+ !
+ CALL h5sclose_f(dataspace1, error)
+
+ CALL h5sclose_f(dataspace2, error)
+
+ !
+ ! Close the datasets.
+ !
+ CALL h5dclose_f(dset1_id, error)
+
+ CALL h5dclose_f(dset2_id, error)
+
+ !
+ ! Close the files.
+ !
+ CALL h5fclose_f(file1_id, error)
+
+ CALL h5fclose_f(file2_id, error)
+
+ !
+ ! Open the two files. Select two points in one file, write values to
+ ! those point locations, then do H5Scopy and write the values to the
+ ! other file. Close files.
+ !
+
+ !
+ ! Open the files.
+ !
+ CALL h5fopen_f (filename1, H5F_ACC_RDWR_F, file1_id, error)
+
+ CALL h5fopen_f (filename2, H5F_ACC_RDWR_F, file2_id, error)
+
+ !
+ ! Open the datasets.
+ !
+ CALL h5dopen_f(file1_id, dsetname1, dset1_id, error)
+
+ CALL h5dopen_f(file2_id, dsetname2, dset2_id, error)
+
+ !
+ ! Get dataset1's dataspace identifier.
+ !
+ CALL h5dget_space_f(dset1_id, dataspace1, error)
+
+ !
+ ! Create memory dataspace.
+ !
+ CALL h5screate_simple_f(memrank, dimsm, memspace, error)
+
+ !
+ ! Set the selected point positions. Because Fortran array indices start
+ ! at 1, one is added to the point coordinates used in the corresponding C example.
+ !
+ coord(1,1) = 1
+ coord(2,1) = 2
+ coord(1,2) = 1
+ coord(2,2) = 4
+
+ !
+ ! Select the elements in file space.
+ !
+ CALL h5sselect_elements_f(dataspace1, H5S_SELECT_SET_F, RANK, NUMP,&
+ coord, error)
+
+ !
+ ! Write value into the selected points in dataset1.
+ !
+ CALL H5dwrite_f(dset1_id, H5T_NATIVE_INTEGER, val, error, &
+ mem_space_id=memspace, file_space_id=dataspace1)
+
+ !
+ ! Copy dataspace1 into dataspace2.
+ !
+ CALL h5scopy_f(dataspace1, dataspace2, error)
+
+ !
+ ! Write value into the selected points in dataset2.
+ !
+ CALL H5dwrite_f(dset2_id, H5T_NATIVE_INTEGER, val, error, &
+ mem_space_id=memspace, file_space_id=dataspace2)
+
+ !
+ ! Close the dataspace for the datasets.
+ !
+ CALL h5sclose_f(dataspace1, error)
+
+ CALL h5sclose_f(dataspace2, error)
+
+ !
+ ! Close the memoryspace.
+ !
+ CALL h5sclose_f(memspace, error)
+
+ !
+ ! Close the datasets.
+ !
+ CALL h5dclose_f(dset1_id, error)
+
+ CALL h5dclose_f(dset2_id, error)
+
+ !
+ ! Close the files.
+ !
+ CALL h5fclose_f(file1_id, error)
+
+ CALL h5fclose_f(file2_id, error)
+
+ !
+ ! Open both files and print the contents of the datasets.
+ !
+
+ !
+ ! Open the files.
+ !
+ CALL h5fopen_f (filename1, H5F_ACC_RDWR_F, file1_id, error)
+
+ CALL h5fopen_f (filename2, H5F_ACC_RDWR_F, file2_id, error)
+
+ !
+ ! Open the datasets.
+ !
+ CALL h5dopen_f(file1_id, dsetname1, dset1_id, error)
+
+ CALL h5dopen_f(file2_id, dsetname2, dset2_id, error)
+
+ !
+ ! Read dataset from the first file.
+ !
+ CALL h5dread_f(dset1_id, H5T_NATIVE_INTEGER, bufnew, error)
+
+ !
+ ! Display the data read from dataset "Copy1"
+ !
+ write(*,*) "The data in dataset Copy1 is: "
+ do i = 1, 3
+ print *, (bufnew(i,j), j = 1,4)
+ end do
+
+ !
+ ! Read dataset from the second file.
+ !
+ CALL h5dread_f(dset2_id, H5T_NATIVE_INTEGER, bufnew, error)
+
+ !
+ ! Display the data read from dataset "Copy2"
+ !
+ write(*,*) "The data in dataset Copy2 is: "
+ do i = 1, 3
+ print *, (bufnew(i,j), j = 1,4)
+ end do
+
+ !
+ ! Close datasets.
+ !
+ CALL h5dclose_f(dset1_id, error)
+
+ CALL h5dclose_f(dset2_id, error)
+
+ !
+ ! Close files.
+ !
+ CALL h5fclose_f(file1_id, error)
+
+ CALL h5fclose_f(file2_id, error)
+
+ !
+ ! Close FORTRAN predefined datatypes.
+ !
+ CALL h5close_f(error)
+
+ END PROGRAM SELECTEXAMPLE
diff --git a/doc/html/Tutor/property.html b/doc/html/Tutor/property.html
new file mode 100644
index 0000000..9d40d44
--- /dev/null
+++ b/doc/html/Tutor/property.html
@@ -0,0 +1,169 @@
+<HTML><HEAD>
+<TITLE>HDF5 Tutorial - Property Lists
+</TITLE>
+</HEAD>
+
+<body bgcolor="#ffffff">
+
+<!-- BEGIN MAIN BODY -->
+
+<A HREF="http://www.ncsa.uiuc.edu/"><img border=0
+src="http://www.ncsa.uiuc.edu/Images/NCSAhome/footerlogo.gif"
+width=78 height=27 alt="NCSA"><P></A>
+
+ [ <A HREF="title.html"><I>HDF5 Tutorial Top</I></A> ]
+<H1>
+<BIG><BIG><BIG><FONT COLOR="#c101cd">Property Lists</FONT>
+</BIG></BIG></BIG></H1>
+
+<hr noshade size=1>
+
+<BODY>
+<!--
+<H2>Contents:</H2>
+<UL>
+ <LI> <A HREF="#def">Definition of Property Lists</A>
+</UL>
+<HR>
+<A NAME="def">
+-->
+<P>
+The property list interface provides a mechanism for adding functionality
+to HDF5 calls, without increasing the number of arguments used
+for a given call.
+<P>
+A property list is a collection of values which can
+be passed to various HDF5 functions to control features that
+are typically unimportant or whose default values are usually used
+(by specifying <code>H5P_DEFAULT</code> / <CODE>H5P_DEFAULT_F</CODE>).
+<P>
+The property list interface supports the less common cases that arise when:
+
+<UL>
+<LI><A HREF="#cf">Creating Files</A>
+<LI><A HREF="#fa">Accessing Files</A>
+<LI><A HREF="#cd">Creating Datasets</A>
+<LI><A HREF="#rdwt">Reading or Writing Data</A>
+</UL>
+
+
+<A NAME="cf">
+<H3>Creating Files</H3>
+The File Creation property list, H5P_FILE_CREATE, applies to H5Fcreate()
+only and is used to control the file metadata which is maintained in the
+super block of the file. The parameters that can be modified are:
+user-block size, offset and length sizes, symbol table parameters,
+and index storage parameters.
+<P>
+The following example shows how to create a file with 64-bit object
+offsets and lengths:
+<PRE>
+ hid_t create_plist;
+ hid_t file_id;
+
+ create_plist = H5Pcreate(H5P_FILE_CREATE);
+ H5Pset_sizes(create_plist, 8, 8);
+
+ file_id = H5Fcreate("test.h5", H5F_ACC_TRUNC,
+ create_plist, H5P_DEFAULT);
+ .
+ .
+ .
+ H5Fclose(file_id);
+</PRE>
+
+<A NAME="fa">
+<H3>Accessing Files</H3>
+The File Access property list, H5P_FILE_ACCESS, applies to H5Fcreate() and
+H5Fopen() and is used to control different methods of
+performing I/O on files. The different types of I/O are: unbuffered I/O,
+buffered I/O, memory I/O, parallel files using MPI I/O, and data alignment.
+<P>
+Following is an example of using the H5P_FILE_ACCESS property list for creating
+HDF5 files with the metadata and data split into different files:
+<BR> &nbsp; &nbsp; &nbsp;
+[ <A HREF="examples/h5split.c">C program</A> ]
+ - <code>h5split.c</code><BR>
+<P>
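+As an illustration only (a minimal sketch, not one of the tutorial example
+programs), a file access property list might be set up to split metadata and
+raw data into separate files roughly as follows. The file name and the
+"-m.h5"/"-r.h5" extensions are arbitrary choices:
+<PRE>
+    hid_t access_plist;
+    hid_t file_id;
+
+    /* Create a file access property list and select the split driver: */
+    /* metadata goes to a "*-m.h5" file, raw data to a "*-r.h5" file.  */
+    access_plist = H5Pcreate(H5P_FILE_ACCESS);
+    H5Pset_fapl_split(access_plist, "-m.h5", H5P_DEFAULT,
+                      "-r.h5", H5P_DEFAULT);
+
+    file_id = H5Fcreate("split.h5", H5F_ACC_TRUNC,
+                        H5P_DEFAULT, access_plist);
+    .
+    .
+    .
+    H5Pclose(access_plist);
+    H5Fclose(file_id);
+</PRE>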
+
+<A NAME="cd">
+<h3>Creating Datasets</H3>
+The Dataset Creation property list, H5P_DATASET_CREATE, applies to
+H5Dcreate() and controls how the raw data
+is organized on disk and how it is compressed. The dataset API
+partitions these concerns into layout, compression, and external storage:
+<P>
+<UL>
+ <LI>Layout:
+<UL>
+<LI>H5D_COMPACT: Data is small and can be stored in object header (<I>not
+ implemented yet</I>). This eliminates disk seek/read requests.
+<P>
+<LI>H5D_CONTIGUOUS: (default) The data is large, non-extendible,
+non-compressible, non-sparse, and can be stored externally.
+<P>
+<LI>H5D_CHUNKED: The data is large and can be extended in any dimension.
+It is partitioned into chunks so each chunk is the same logical size.
+Following is an example that uses the H5P_DATASET_CREATE property list to create
+a chunked and extendible dataset:
+<BR> &nbsp; &nbsp; &nbsp;
+[ <A HREF="examples/h5_extend.c">C program</A> ]
+ - <code>h5_extend.c</code><BR>
+<P>
+</UL>
+<LI>Compression: (gzip compression; a minimal sketch follows this list)
+<LI>External Storage Properties: The data must be contiguous to be stored
+ externally. External storage allows you to store the data in one or more non-HDF5 files.
+Following is an example of using the H5P_DATASET_CREATE property list to
+create a dataset in an external file:
+<BR> &nbsp; &nbsp; &nbsp;
+[ <A HREF="examples/h5_crtextd.c">C program</A> ]
+ - <code>h5_crtextd.c</code><BR>
+<P>
+</UL>
+</UL>
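+<P>
+As an illustration only (a minimal sketch, not one of the tutorial example
+programs), a chunked dataset with gzip compression could be created roughly
+as follows. The dataset name, the sizes, and the compression level are
+arbitrary, and <code>file_id</code> is assumed to be an already open file
+identifier:
+<PRE>
+    hsize_t dims[2]       = {100, 200};
+    hsize_t chunk_dims[2] = {10, 20};
+    hid_t   space_id, create_plist, dset_id;
+
+    /* Dataspace for the whole dataset. */
+    space_id = H5Screate_simple(2, dims, NULL);
+
+    /* Dataset creation property list: chunked layout, gzip level 6. */
+    create_plist = H5Pcreate(H5P_DATASET_CREATE);
+    H5Pset_chunk(create_plist, 2, chunk_dims);
+    H5Pset_deflate(create_plist, 6);
+
+    /* file_id is assumed to have been opened (or created) elsewhere. */
+    dset_id = H5Dcreate(file_id, "/ChunkedData", H5T_NATIVE_INT,
+                        space_id, create_plist);
+</PRE>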
+<A NAME="rdwt">
+<H3>Reading or Writing Data</H3>
+
+The Data Transfer property list, H5P_DATASET_XFER, is used to control
+various aspects of I/O, such as caching hints or collective I/O information.
+<P>
+The following code sets the maximum size for the type conversion buffer
+and background buffer:
+<PRE>
+ plist_xfer = H5Pcreate (H5P_DATASET_XFER);
+ H5Pset_buffer(plist_xfer, (hsize_t)NX*NY*NZ, NULL, NULL);
+ status = H5Dread (dataset, H5T_NATIVE_UCHAR, memspace, dataspace,
+ plist_xfer);
+</PRE>
+See:<BR>
+ [ <A HREF="examples/h5_xfer.c">C program</A> ]
+ - <code> h5_xfer.c</code><BR>
+
+
+<!-- BEGIN FOOTER INFO -->
+
+<P><hr noshade size=1>
+<font face="arial,helvetica" size="-1">
+ <a href="http://www.ncsa.uiuc.edu/"><img border=0
+ src="http://www.ncsa.uiuc.edu/Images/NCSAhome/footerlogo.gif"
+ width=78 height=27 alt="NCSA"><br>
+ The National Center for Supercomputing Applications</A><br>
+ <a href="http://www.uiuc.edu/">University of Illinois
+ at Urbana-Champaign</a><br>
+ <br>
+<!-- <A HREF="helpdesk.mail.html"> -->
+<A HREF="mailto:hdfhelp@ncsa.uiuc.edu">
+hdfhelp@ncsa.uiuc.edu</A>
+<br>
+<BR> <H6>Last Modified: February 12, 2001</H6><BR>
+<!-- modified by Barbara Jones - bljones@ncsa.uiuc.edu -->
+</FONT>
+<BR>
+<!-- <A HREF="mailto:hdfhelp@ncsa.uiuc.edu"> -->
+
+</BODY>
+</HTML>
+
+
+
diff --git a/doc/html/Tutor/software.html b/doc/html/Tutor/software.html
new file mode 100644
index 0000000..9802845
--- /dev/null
+++ b/doc/html/Tutor/software.html
@@ -0,0 +1,88 @@
+<HTML><HEAD>
+<TITLE>HDF5 Tutorial - Obtaining HDF5 Software</TITLE>
+</HEAD>
+
+<body bgcolor="#ffffff">
+
+<!-- BEGIN MAIN BODY -->
+
+<A HREF="http://www.ncsa.uiuc.edu/"><img border=0
+src="http://www.ncsa.uiuc.edu/Images/NCSAhome/footerlogo.gif"
+width=78 height=27 alt="NCSA"><P></A>
+
+ [ <A HREF="title.html"><I>HDF5 Tutorial Top</I></A> ]
+<H1>
+<BIG><BIG><BIG><FONT COLOR="#c101cd">Obtaining HDF5 Software</FONT>
+</BIG></BIG></BIG></H1>
+
+<hr noshade size=1>
+
+<BODY>
+If you will be compiling in:
+<DL>
+<DT><B>C:</B>
+<DD>You will need the HDF5 library. We provide pre-compiled binaries
+for the platforms on which we tested at:
+<BR>&nbsp; &nbsp; &nbsp; &nbsp;
+<A HREF="ftp://ftp.ncsa.uiuc.edu/HDF/HDF5/current/bin/">ftp://ftp.ncsa.uiuc.edu/HDF/HDF5/current/bin/</A>
+<P>
+If you use the pre-compiled binaries, you must also obtain the GZIP library:
+the binaries were built with GZIP support but do not include the library
+itself. We provide the GZIP library for the platforms on
+which we tested at:
+<BR>&nbsp; &nbsp; &nbsp; &nbsp;
+<A HREF="ftp://ftp.ncsa.uiuc.edu/HDF/gzip/">ftp://ftp.ncsa.uiuc.edu/HDF/gzip/</A>
+<P>
+You can build the HDF5 library yourself, if need be. The source code
+can be obtained from:
+<BR>&nbsp; &nbsp; &nbsp; &nbsp;
+<A HREF="ftp://ftp.ncsa.uiuc.edu/HDF/HDF5/current/src/">ftp://ftp.ncsa.uiuc.edu/HDF/HDF5/current/src/</A>
+<P>
+For further information regarding HDF5, check the HDF5 home page:
+<BR>&nbsp; &nbsp; &nbsp; &nbsp;
+<A HREF="http://hdf.ncsa.uiuc.edu/HDF5/">http://hdf.ncsa.uiuc.edu/HDF5/</A>
+<P>
+<DT><B>FORTRAN 90:</B>
+<DD> With HDF5-1.4.0, support for Fortran 90 is included as part of
+the installation of the HDF5 library. The pre-compiled binaries include
+the Fortran library. If you need to build from source, download the
+HDF5-1.4.0 source code and compile it with the <I>--enable-fortran</I> flag.
+Read the instructions in the
+<A HREF="ftp://ftp.ncsa.uiuc.edu/HDF/HDF5/current/src/unpacked/RELEASE.txt">RELEASE.txt</A>
+file for further details.
+
+<P>
+<DT><B>Java:</B>
+<DD>You will need the JHI5 code. Go to the
+<A HREF="http://hdf.ncsa.uiuc.edu/java-hdf5-html">Java HDF5 web page</A>
+for information on the Java-HDF5 software. The Java Tutorial examples
+are included with this tutorial:
+<BR>&nbsp; &nbsp; &nbsp; &nbsp;
+ <A HREF="./examples/java/">./examples/java/</A>
+</DL>
+
+
+<!-- BEGIN FOOTER INFO -->
+
+<P><hr noshade size=1>
+<font face="arial,helvetica" size="-1">
+ <a href="http://www.ncsa.uiuc.edu/"><img border=0
+ src="http://www.ncsa.uiuc.edu/Images/NCSAhome/footerlogo.gif"
+ width=78 height=27 alt="NCSA"><br>
+ The National Center for Supercomputing Applications</A><br>
+ <a href="http://www.uiuc.edu/">University of Illinois
+ at Urbana-Champaign</a><br>
+ <br>
+<!-- <A HREF="helpdesk.mail.html"> -->
+<A HREF="mailto:hdfhelp@@ncsa.uiuc.edu">
+hdfhelp@@ncsa.uiuc.edu</A>
+<BR> <H6>Last Modified: March 8, 2001</H6><BR>
+<!-- modified by Barbara Jones - bljones@ncsa.uiuc.edu -->
+<!-- modified by Frank Baker - fbaker@ncsa.uiuc.edu -->
+</FONT>
+<BR>
+<!-- <A HREF="mailto:hdfhelp@@ncsa.uiuc.edu"> -->
+
+</BODY>
+</HTML>
+