path: root/c++/examples/h5tutr_extend.cpp
author     Allen Byrne <byrn@hdfgroup.org>   2020-05-09 17:11:04 (GMT)
committer  Allen Byrne <byrn@hdfgroup.org>   2020-05-09 17:11:04 (GMT)
commit     6b9f11d7a656504226632c59b42b6b2f7ec52453 (patch)
tree       6d39d826aec25f0f1965cc7e9ea121fdd7d91f29 /c++/examples/h5tutr_extend.cpp
parent     7c7bab5884bf2a827fb6ec24de610bbde6be3922 (diff)
Whitespace cleanup compared to develop
Diffstat (limited to 'c++/examples/h5tutr_extend.cpp')
-rw-r--r--   c++/examples/h5tutr_extend.cpp   28
1 file changed, 14 insertions(+), 14 deletions(-)
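
For orientation (not part of the patch): the hunks below only strip trailing whitespace, so the logic of the tutorial example is unchanged. The following is a minimal, self-contained sketch of the extendible-dataset pattern the example demonstrates, built from the same H5Cpp calls that appear in the diff. The output file name, the use of stack objects instead of the example's heap-allocated DataSet/DataSpace pointers, and the omission of the try/catch error handling are simplifications for readability, not the example's exact code.

// Sketch (assumptions noted above): create a chunked, extendible dataset,
// extend it, and write into the new region via a hyperslab selection.
#include "H5Cpp.h"
using namespace H5;

const H5std_string FILE_NAME("h5tutr_extend.h5");      // assumed file name
const H5std_string DATASETNAME("ExtendibleArray");

int main(void)
{
    hsize_t dims[2]       = {3, 3};                    // dimensions at creation
    hsize_t maxdims[2]    = {H5S_UNLIMITED, H5S_UNLIMITED};
    hsize_t chunk_dims[2] = {2, 5};
    int     data[3][3]    = {{1,1,1}, {1,1,1}, {1,1,1}};

    // An extendible dataset needs unlimited maxdims and a chunked layout.
    H5File file(FILE_NAME, H5F_ACC_TRUNC);
    DataSpace dataspace(2, dims, maxdims);
    DSetCreatPropList prop;
    prop.setChunk(2, chunk_dims);
    DataSet dataset = file.createDataSet(DATASETNAME, PredType::STD_I32BE,
                                         dataspace, prop);
    dataset.write(data, PredType::NATIVE_INT);

    // Grow the dataset from 3 x 3 to 10 x 3, then address only the new rows
    // through a hyperslab selection on the file dataspace.
    hsize_t dimsext[2] = {7, 3};
    hsize_t size[2]    = {dims[0] + dimsext[0], dims[1]};
    dataset.extend(size);

    DataSpace filespace(dataset.getSpace());
    hsize_t offset[2] = {3, 0};
    filespace.selectHyperslab(H5S_SELECT_SET, dimsext, offset);
    DataSpace memspace(2, dimsext);

    int dataext[7][3] = {{2,3,4}, {2,3,4}, {2,3,4}, {2,3,4},
                         {2,3,4}, {2,3,4}, {2,3,4}};
    dataset.write(dataext, PredType::NATIVE_INT, memspace, filespace);

    return 0;   // destructors close the dataset and file
}
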
diff --git a/c++/examples/h5tutr_extend.cpp b/c++/examples/h5tutr_extend.cpp
index b6927ea..94e9acf 100644
--- a/c++/examples/h5tutr_extend.cpp
+++ b/c++/examples/h5tutr_extend.cpp
@@ -30,17 +30,17 @@ const H5std_string DATASETNAME("ExtendibleArray");
int main (void)
{
hsize_t dims[2] = {3,3}; // dataset dimensions at creation
- hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
+ hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
hsize_t chunk_dims[2] ={2, 5};
- int data[3][3] = { {1, 1, 1}, // data to write
+ int data[3][3] = { {1, 1, 1}, // data to write
{1, 1, 1},
{1, 1, 1} };
- // Variables used in extending and writing to the extended portion of dataset
+ // Variables used in extending and writing to the extended portion of dataset
hsize_t size[2];
hsize_t offset[2];
- hsize_t dimsext[2] = {7, 3}; // extend dimensions
+ hsize_t dimsext[2] = {7, 3}; // extend dimensions
int dataext[7][3] = { {2, 3, 4},
{2, 3, 4},
{2, 3, 4},
@@ -56,7 +56,7 @@ int main (void)
// handle the errors appropriately
Exception::dontPrint();
- // Create a new file using the default property lists.
+ // Create a new file using the default property lists.
H5File file(FILE_NAME, H5F_ACC_TRUNC);
// Create the data space for the dataset. Note the use of pointer
@@ -70,23 +70,23 @@ int main (void)
prop.setChunk(2, chunk_dims);
// Create the chunked dataset. Note the use of pointer.
- DataSet *dataset = new DataSet(file.createDataSet( DATASETNAME,
+ DataSet *dataset = new DataSet(file.createDataSet( DATASETNAME,
PredType::STD_I32BE, *dataspace, prop) );
-
+
// Write data to dataset.
dataset->write(data, PredType::NATIVE_INT);
// Extend the dataset. Dataset becomes 10 x 3.
size[0] = dims[0] + dimsext[0];
size[1] = dims[1];
- dataset->extend(size);
+ dataset->extend(size);
// Select a hyperslab in extended portion of the dataset.
DataSpace *filespace = new DataSpace(dataset->getSpace ());
offset[0] = 3;
offset[1] = 0;
filespace->selectHyperslab(H5S_SELECT_SET, dimsext, offset);
-
+
// Define memory space.
DataSpace *memspace = new DataSpace(2, dimsext, NULL);
@@ -101,9 +101,9 @@ int main (void)
delete dataset;
file.close();
- // ---------------------------------------
+ // ---------------------------------------
// Re-open the file and read the data back
- // ---------------------------------------
+ // ---------------------------------------
int rdata[10][3];
int i,j, rank, rank_chunk;
@@ -111,7 +111,7 @@ int main (void)
// Open the file and dataset.
file.openFile(FILE_NAME, H5F_ACC_RDONLY);
- dataset = new DataSet(file.openDataSet( DATASETNAME));
+ dataset = new DataSet(file.openDataSet( DATASETNAME));
// Get the dataset's dataspace and creation property list.
filespace = new DataSpace(dataset->getSpace());
@@ -127,7 +127,7 @@ int main (void)
memspace = new DataSpace(rank, dimsr, NULL);
dataset->read(rdata, PredType::NATIVE_INT, *memspace, *filespace);
-
+
cout << endl;
for (j = 0; j < dimsr[0]; j++) {
for (i = 0; i < dimsr[1]; i++)
@@ -141,7 +141,7 @@ int main (void)
delete memspace;
delete dataset;
file.close();
-
+
} // end of try block
// catch failure caused by the H5File operations