/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of HDF5.  The full HDF5 copyright notice, including     *
 * terms governing use, modification, and redistribution, is contained in    *
 * the COPYING file, which can be found at the root of the source code       *
 * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/*
 *   This example shows how to read data from a chunked dataset.
 *   We will read from the file created by extend.cpp
 */

#ifdef OLD_HEADER_FILENAME
#include <iostream.h>
#else
#include <iostream>
#endif
using std::cout;
using std::endl;

#include <string>
#include "H5Cpp.h"
using namespace H5;

const H5std_string FILE_NAME( "SDSextendible.h5" );
const H5std_string DATASET_NAME( "ExtendibleArray" );
const int      NX = 10;
const int      NY = 5;
const int      RANK = 2;
const int      RANKC = 1;

int main (void)
{
    hsize_t	i, j;

    // Try block to detect exceptions raised by any of the calls inside it
    try
    {
	/*
	 * Turn off the auto-printing when failure occurs so that we can
	 * handle the errors appropriately
	 */
	Exception::dontPrint();

	/*
	 * Open the file and the dataset.
	 */
	H5File file( FILE_NAME, H5F_ACC_RDONLY );
	DataSet dataset = file.openDataSet( DATASET_NAME );

	/*
	 * Get filespace for rank and dimension
	 */
	DataSpace filespace = dataset.getSpace();

	/*
	 * Get number of dimensions in the file dataspace
	 */
	int rank = filespace.getSimpleExtentNdims();

	/*
	 * Get and print the dimension sizes of the file dataspace
	 */
	hsize_t dims[2]; 	// dataset dimensions
	rank = filespace.getSimpleExtentDims( dims );
	cout << "dataset rank = " << rank << ", dimensions "
	     << (unsigned long)(dims[0]) << " x "
	     << (unsigned long)(dims[1]) << endl;

	/*
	 * Define the memory space to read dataset.
	 */
	DataSpace mspace1(RANK, dims);
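	// The memory dataspace reuses the extent just read from the file;
	// the data_out buffer below assumes those dimensions are NX x NY
	// (10 x 5), matching the dataset written by the extend example.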

	/*
	 * Read dataset back and display.
	 */
	int data_out[NX][NY];  // buffer for dataset to be read
	dataset.read( data_out, PredType::NATIVE_INT, mspace1, filespace );

	cout << "\n";
	cout << "Dataset: \n";
	for (j = 0; j < dims[0]; j++)
	{
	    for (i = 0; i < dims[1]; i++)
		cout << data_out[j][i] << " ";
	    cout << endl;
	}

	/*
	 *	    dataset rank 2, dimensions 10 x 5
	 *	    chunk rank 2, dimensions 2 x 5
	 *
	 *	    Dataset:
	 *	    1 1 1 3 3
	 *	    1 1 1 3 3
	 *	    1 1 1 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 *	    2 0 0 0 0
	 */

	/*
	 * Read the third column from the dataset.
	 * First define memory dataspace, then define hyperslab
	 * and read it into column array.
	 */
	hsize_t col_dims[1];
	col_dims[0] = 10;
	DataSpace mspace2( RANKC, col_dims );

	/*
	 * Define the column (hyperslab) to read.
	 */
	hsize_t offset[2] = { 0, 2 };
	hsize_t  count[2] = { 10, 1 };
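	// Hyperslab parameters: offset {0, 2} starts at row 0, column index 2;
	// count {10, 1} selects all 10 rows of that single column, i.e. the
	// third column of the dataset.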
	int column[10];  // buffer for column to be read

	/*
	 * Define hyperslab and read.
	 */
	filespace.selectHyperslab( H5S_SELECT_SET, count, offset );
	dataset.read( column, PredType::NATIVE_INT, mspace2, filespace );

	cout << endl;
	cout << "Third column: " << endl;
	for (i = 0; i < 10; i++)
	    cout << column[i] << endl;

	/*
	 *	    Third column:
	 *	    1
	 *	    1
	 *	    1
	 *	    0
	 *	    0
	 *	    0
	 *	    0
	 *	    0
	 *	    0
	 *	    0
	 */

	/*
	 * Get creation properties list.
	 */
	DSetCreatPropList cparms = dataset.getCreatePlist();
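	// The dataset creation property list records the storage layout;
	// getLayout() returns H5D_CHUNKED when the dataset uses chunked storage.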

	/*
	 * Check if dataset is chunked.
	 */
	hsize_t chunk_dims[2];
	int     rank_chunk;
	if( H5D_CHUNKED == cparms.getLayout() )
	{
	    /*
	     * Get chunking information: rank and dimensions
	     */
	    rank_chunk = cparms.getChunk( 2, chunk_dims);
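	    // getChunk() fills chunk_dims with the chunk dimensions and
	    // returns the chunk rank (2 x 5 for the file written by extend.cpp).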
	    cout << "chunk rank " << rank_chunk << "dimensions "
		<< (unsigned long)(chunk_dims[0]) << " x "
		<< (unsigned long)(chunk_dims[1]) << endl;

	    /*
	     * Define the memory space to read a chunk.
	     */
	    DataSpace mspace3( rank_chunk, chunk_dims );

	    /*
	     * Define chunk in the file (hyperslab) to read.
	     */
	    offset[0] = 2;
	    offset[1] = 0;
	    count[0]  = chunk_dims[0];
	    count[1]  = chunk_dims[1];
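	    // Select the chunk-sized hyperslab starting at row 2, column 0;
	    // with 2 x 5 chunks this covers the dataset's second chunk.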
	    filespace.selectHyperslab( H5S_SELECT_SET, count, offset );

	    /*
	     * Read chunk back and display.
	     */
	    int chunk_out[2][5];   // buffer for chunk to be read
	    dataset.read( chunk_out, PredType::NATIVE_INT, mspace3, filespace );
	    cout << endl;
	    cout << "Chunk:" << endl;
	    for (j = 0; j < chunk_dims[0]; j++)
	    {
		for (i = 0; i < chunk_dims[1]; i++)
		    cout << chunk_out[j][i] << " ";
		cout << endl;
	    }
	    /*
	     *	 Chunk:
	     *	 1 1 1 0 0
	     *	 2 0 0 0 0
	     */
	}
    }  // end of try block

    // catch failure caused by the H5File operations
    catch( FileIException &error )
    {
	error.printErrorStack();
	return -1;
    }

    // catch failure caused by the DataSet operations
    catch( DataSetIException &error )
    {
	error.printErrorStack();
	return -1;
    }

    // catch failure caused by the DataSpace operations
    catch( DataSpaceIException &error )
    {
	error.printErrorStack();
	return -1;
    }
    return 0;
}