path: root/c++/examples/h5group.cpp
author     Larry Knox <lrknox@hdfgroup.org>    2017-07-06 19:16:57 (GMT)
committer  Larry Knox <lrknox@hdfgroup.org>    2017-07-06 19:16:57 (GMT)
commit     23a702e7bad93fc4f14eab07678c75d276e2d0ad (patch)
tree       afb57ddaeca17021342fa09b8999c15569630e71 /c++/examples/h5group.cpp
parent     03dbcb75918b2cbf39800d9edf5665d3471a1fee (diff)
parent     c15238a38fdb3692466bc1218de4c5aeb420fdcc (diff)
download   hdf5-23a702e7bad93fc4f14eab07678c75d276e2d0ad.zip
           hdf5-23a702e7bad93fc4f14eab07678c75d276e2d0ad.tar.gz
           hdf5-23a702e7bad93fc4f14eab07678c75d276e2d0ad.tar.bz2
Merge pull request #593 in HDFFV/hdf5 from hdf5_1_8_19 to 1.8/master (tag: hdf5-1_8_19)
* commit 'c15238a38fdb3692466bc1218de4c5aeb420fdcc': (153 commits)
  Commit changes to release version strings in README.txt and RELEASE.txt from release.
  Put back link targets for dynamically loaded libraries in Makefile.ams that avoid linking with libhdf5 dependencies. Some configurations failed unnecessarily to find and link with lz and lsz when these targets were removed.
  Update URL for obtaining source in INSTALL file.
  Disable building of libdynlib* dynamically loaded plugin test libraries on CYGWIN.
  Added known problem entry to RELEASE.txt.
  Set version for 1.8.19 release.
  Barbara updated contents of README.txt file.
  Moved libdynlib* plugin test libraries to noinst_LT_LIBRARIES with added flag '-rpath /nowhere' to stop them being installed while keeping them from linking with libhdf5, etc.
  Incremented version to 1.8.19-pre1 and ran bin/release to create tar and zip files for testing.
  HDF5 1.8.19-pre1 released.
  Add RELEASE.txt entry for H5Dget_chunk_size.
  Remove test entries.
  Ran bin/reconfigure after .so number change.
  Update hl lib .so numbers: new function was added.
  Update RELEASE.txt: move H5DOread_chunks entry to new features, clarify entry for HDFFV-10051.
  Updated notes for test fixes and added bug fix for h5diff help text.
  Update h5vers and release scripts for 1.8.19 branch, set version to 1.8.19-pre1.
  Make script name consistent across product versions.
  Update version to 1.8.19-pre1.
  Propagate .so number changes to Makefile.ins with bin/reconfigure.
  Add toolset option.
  Update .so version numbers according to the interface compatibility report.
  Update supported Platforms section.
  Remove empty sub-sections of New Features and Bugs Fixed sections.
  Add entries for HDFFV-10051 and HDFFV-9934 to RELEASE.txt.
  Correct spelling.
  ...
Diffstat (limited to 'c++/examples/h5group.cpp')
-rw-r--r--   c++/examples/h5group.cpp | 298
1 file changed, 148 insertions(+), 150 deletions(-)
diff --git a/c++/examples/h5group.cpp b/c++/examples/h5group.cpp
index a5137bc..6e1764f 100644
--- a/c++/examples/h5group.cpp
+++ b/c++/examples/h5group.cpp
@@ -5,12 +5,10 @@
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
- * the files COPYING and Copyright.html. COPYING can be found at the root *
- * of the source code distribution tree; Copyright.html can be found at the *
- * root level of an installed copy of the electronic HDF5 document set and *
- * is linked from the top-level documents page. It can also be found at *
- * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
- * access to either file, you may request a copy from help@hdfgroup.org. *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/*
@@ -43,7 +41,7 @@
#endif
const H5std_string FILE_NAME( "Group.h5" );
-const int RANK = 2;
+const int RANK = 2;
// Operator function
extern "C" herr_t file_info(hid_t loc_id, const char *name, const H5L_info_t *linfo,
@@ -58,169 +56,169 @@ int main(void)
// Try block to detect exceptions raised by any of the calls inside it
try
{
- /*
- * Turn off the auto-printing when failure occurs so that we can
- * handle the errors appropriately
- */
- Exception::dontPrint();
-
- /*
- * Create the named file, truncating the existing one if any,
- * using default create and access property lists.
- */
- H5File *file = new H5File( FILE_NAME, H5F_ACC_TRUNC );
-
- /*
- * Create a group in the file
- */
- Group* group = new Group( file->createGroup( "/Data" ));
-
- /*
- * Create dataset "Compressed Data" in the group using absolute
- * name. Dataset creation property list is modified to use
- * GZIP compression with the compression effort set to 6.
- * Note that compression can be used only when dataset is chunked.
- */
- dims[0] = 1000;
- dims[1] = 20;
- cdims[0] = 20;
- cdims[1] = 20;
- DataSpace *dataspace = new DataSpace(RANK, dims); // create new dspace
- DSetCreatPropList ds_creatplist; // create dataset creation prop list
- ds_creatplist.setChunk( 2, cdims ); // then modify it for compression
- ds_creatplist.setDeflate( 6 );
-
- /*
- * Create the first dataset.
- */
- DataSet* dataset = new DataSet(file->createDataSet(
- "/Data/Compressed_Data", PredType::NATIVE_INT,
- *dataspace, ds_creatplist ));
-
- /*
- * Close the first dataset.
- */
- delete dataset;
- delete dataspace;
-
- /*
- * Create the second dataset.
- */
- dims[0] = 500;
- dims[1] = 20;
- dataspace = new DataSpace(RANK, dims); // create second dspace
- dataset = new DataSet(file->createDataSet("/Data/Float_Data",
- PredType::NATIVE_FLOAT, *dataspace));
-
- delete dataset;
- delete dataspace;
- delete group;
- delete file;
-
- /*
- * Now reopen the file and group in the file.
- */
- file = new H5File(FILE_NAME, H5F_ACC_RDWR);
- group = new Group(file->openGroup("Data"));
-
- /*
- * Access "Compressed_Data" dataset in the group.
- */
- try { // to determine if the dataset exists in the group
- dataset = new DataSet( group->openDataSet( "Compressed_Data" ));
- }
- catch( GroupIException not_found_error ) {
- cout << " Dataset is not found." << endl;
- }
- cout << "dataset \"/Data/Compressed_Data\" is open" << endl;
-
- /*
- * Close the dataset.
- */
- delete dataset;
-
- /*
- * Create hard link to the Data group.
- */
- file->link( H5L_TYPE_HARD, "Data", "Data_new" );
-
- /*
- * We can access "Compressed_Data" dataset using created
- * hard link "Data_new".
- */
- try { // to determine if the dataset exists in the file
- dataset = new DataSet(file->openDataSet( "/Data_new/Compressed_Data" ));
- }
- catch( FileIException not_found_error )
- {
- cout << " Dataset is not found." << endl;
- }
- cout << "dataset \"/Data_new/Compressed_Data\" is open" << endl;
-
- /*
- * Close the dataset.
- */
- delete dataset;
-
- /*
- * Use iterator to see the names of the objects in the file
- * root directory.
- */
- cout << endl << "Iterating over elements in the file" << endl;
- herr_t idx = H5Literate(file->getId(), H5_INDEX_NAME, H5_ITER_INC, NULL, file_info, NULL);
- cout << endl;
-
- /*
- * Unlink name "Data" and use iterator to see the names
- * of the objects in the file root direvtory.
- */
- cout << "Unlinking..." << endl;
- try { // attempt to unlink the dataset
- file->unlink( "Data" );
- }
- catch( FileIException unlink_error )
- {
- cout << " unlink failed." << endl;
- }
- cout << "\"Data\" is unlinked" << endl;
-
- cout << endl << "Iterating over elements in the file again" << endl;
- idx = H5Literate(file->getId(), H5_INDEX_NAME, H5_ITER_INC, NULL, file_info, NULL);
- cout << endl;
-
- /*
- * Close the group and file.
- */
- delete group;
- delete file;
+ /*
+ * Turn off the auto-printing when failure occurs so that we can
+ * handle the errors appropriately
+ */
+ Exception::dontPrint();
+
+ /*
+ * Create the named file, truncating the existing one if any,
+ * using default create and access property lists.
+ */
+ H5File *file = new H5File( FILE_NAME, H5F_ACC_TRUNC );
+
+ /*
+ * Create a group in the file
+ */
+ Group* group = new Group( file->createGroup( "/Data" ));
+
+ /*
+ * Create dataset "Compressed Data" in the group using absolute
+ * name. Dataset creation property list is modified to use
+ * GZIP compression with the compression effort set to 6.
+ * Note that compression can be used only when dataset is chunked.
+ */
+ dims[0] = 1000;
+ dims[1] = 20;
+ cdims[0] = 20;
+ cdims[1] = 20;
+ DataSpace *dataspace = new DataSpace(RANK, dims); // create new dspace
+ DSetCreatPropList ds_creatplist; // create dataset creation prop list
+ ds_creatplist.setChunk( 2, cdims ); // then modify it for compression
+ ds_creatplist.setDeflate( 6 );
+
+ /*
+ * Create the first dataset.
+ */
+ DataSet* dataset = new DataSet(file->createDataSet(
+ "/Data/Compressed_Data", PredType::NATIVE_INT,
+ *dataspace, ds_creatplist ));
+
+ /*
+ * Close the first dataset.
+ */
+ delete dataset;
+ delete dataspace;
+
+ /*
+ * Create the second dataset.
+ */
+ dims[0] = 500;
+ dims[1] = 20;
+ dataspace = new DataSpace(RANK, dims); // create second dspace
+ dataset = new DataSet(file->createDataSet("/Data/Float_Data",
+ PredType::NATIVE_FLOAT, *dataspace));
+
+ delete dataset;
+ delete dataspace;
+ delete group;
+ delete file;
+
+ /*
+ * Now reopen the file and group in the file.
+ */
+ file = new H5File(FILE_NAME, H5F_ACC_RDWR);
+ group = new Group(file->openGroup("Data"));
+
+ /*
+ * Access "Compressed_Data" dataset in the group.
+ */
+ try { // to determine if the dataset exists in the group
+ dataset = new DataSet( group->openDataSet( "Compressed_Data" ));
+ }
+ catch( GroupIException not_found_error ) {
+ cout << " Dataset is not found." << endl;
+ }
+ cout << "dataset \"/Data/Compressed_Data\" is open" << endl;
+
+ /*
+ * Close the dataset.
+ */
+ delete dataset;
+
+ /*
+ * Create hard link to the Data group.
+ */
+ file->link( H5L_TYPE_HARD, "Data", "Data_new" );
+
+ /*
+ * We can access "Compressed_Data" dataset using created
+ * hard link "Data_new".
+ */
+ try { // to determine if the dataset exists in the file
+ dataset = new DataSet(file->openDataSet( "/Data_new/Compressed_Data" ));
+ }
+ catch( FileIException not_found_error )
+ {
+ cout << " Dataset is not found." << endl;
+ }
+ cout << "dataset \"/Data_new/Compressed_Data\" is open" << endl;
+
+ /*
+ * Close the dataset.
+ */
+ delete dataset;
+
+ /*
+ * Use iterator to see the names of the objects in the file
+ * root directory.
+ */
+ cout << endl << "Iterating over elements in the file" << endl;
+ herr_t idx = H5Literate(file->getId(), H5_INDEX_NAME, H5_ITER_INC, NULL, file_info, NULL);
+ cout << endl;
+
+ /*
+ * Unlink name "Data" and use iterator to see the names
+ * of the objects in the file root direvtory.
+ */
+ cout << "Unlinking..." << endl;
+ try { // attempt to unlink the dataset
+ file->unlink( "Data" );
+ }
+ catch( FileIException unlink_error )
+ {
+ cout << " unlink failed." << endl;
+ }
+ cout << "\"Data\" is unlinked" << endl;
+
+ cout << endl << "Iterating over elements in the file again" << endl;
+ idx = H5Literate(file->getId(), H5_INDEX_NAME, H5_ITER_INC, NULL, file_info, NULL);
+ cout << endl;
+
+ /*
+ * Close the group and file.
+ */
+ delete group;
+ delete file;
} // end of try block
// catch failure caused by the H5File operations
catch( FileIException error )
{
- error.printError();
- return -1;
+ error.printError();
+ return -1;
}
// catch failure caused by the DataSet operations
catch( DataSetIException error )
{
- error.printError();
- return -1;
+ error.printError();
+ return -1;
}
// catch failure caused by the DataSpace operations
catch( DataSpaceIException error )
{
- error.printError();
- return -1;
+ error.printError();
+ return -1;
}
// catch failure caused by the Attribute operations
catch( AttributeIException error )
{
- error.printError();
- return -1;
+ error.printError();
+ return -1;
}
return 0;
}
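
Note: the iteration calls in this example pass an operator function named file_info to H5Literate; its definition lies outside the hunks shown above, and the declaration visible in the diff is truncated. A minimal sketch of such a callback, assuming the conventional final void* user-data parameter (an assumption, not the code from this commit), could look like:

    #include <iostream>
    #include "H5Cpp.h"   // pulls in the C API types hid_t, herr_t, H5L_info_t

    // Called by H5Literate once per link in the object identified by loc_id.
    extern "C" herr_t file_info(hid_t loc_id, const char *name,
                                const H5L_info_t *linfo, void *opdata)
    {
        (void)loc_id;    // unused in this minimal sketch
        (void)linfo;
        (void)opdata;

        // Print the link name; returning 0 tells H5Literate to keep iterating.
        std::cout << "Name : " << name << std::endl;
        return 0;
    }

A non-zero return value would stop the iteration early and be passed back as the return value of H5Literate.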