author    vchoi-hdfgroup <55293060+vchoi-hdfgroup@users.noreply.github.com>  2022-07-01 15:42:15 (GMT)
committer GitHub <noreply@github.com>  2022-07-01 15:42:15 (GMT)
commit    9e97a22b8bb7cc92f0b2ef2b63de61e9da50aba6 (patch)
tree      8921d2c772d5616a1251b312663a97f5d0513406 /doxygen/examples
parent    3ed53d17fb018c0415be2d668c6765217deff16f (diff)
Add doxygen for high level modules. (#1839)
* Add doxygen for high level modules.
* Committing clang-format changes
* Correct spelling based on spelling check.
* Committing clang-format changes

Co-authored-by: vchoi <vchoi@jelly.ad.hdfgroup.org>
Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
Diffstat (limited to 'doxygen/examples')
-rw-r--r--  doxygen/examples/H5DO_examples.c       | 220
-rw-r--r--  doxygen/examples/H5LDget_dset_elmts.c  | 143
-rw-r--r--  doxygen/examples/H5LT_examples.c       |  27
-rw-r--r--  doxygen/examples/H5TBAget_fill.c       |  43
4 files changed, 433 insertions, 0 deletions
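
Note: the `//! <!-- [tag] -->` pairs that bracket each fragment below are Doxygen snippet markers; a `\snippet <file> <tag>` command in an API doc comment pulls the bracketed region into the generated reference manual. A minimal sketch of such a reference follows (the exact doc comment in the HDF5 high-level headers may differ; it is shown here only to illustrate how these example files are consumed):

    /**
     * \brief Writes a raw data chunk from a buffer directly to a dataset in a file.
     *
     * \par Example
     * \snippet H5DO_examples.c H5DOwrite
     */
    H5_HLDLL herr_t H5DOwrite_chunk(hid_t dset_id, hid_t dxpl_id, uint32_t filters,
                                    const hsize_t *offset, size_t data_size, const void *buf);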
diff --git a/doxygen/examples/H5DO_examples.c b/doxygen/examples/H5DO_examples.c
new file mode 100644
index 0000000..7a33c6b
--- /dev/null
+++ b/doxygen/examples/H5DO_examples.c
@@ -0,0 +1,220 @@
+/* -*- c-file-style: "stroustrup" -*- */
+
+//! <!-- [H5DOwrite] -->
+
+#include <zlib.h>
+#include <math.h>
+#define DEFLATE_SIZE_ADJUST(s) (ceil(((double)(s)) * 1.001) + 12)
+ :
+ :
+ size_t buf_size = CHUNK_NX * CHUNK_NY * sizeof(int);
+ const Bytef *z_src = (const Bytef *)(direct_buf);
+ Bytef * z_dst; /* Destination buffer */
+ uLongf z_dst_nbytes = (uLongf)DEFLATE_SIZE_ADJUST(buf_size);
+ uLong z_src_nbytes = (uLong)buf_size;
+ int aggression = 9; /* Compression aggression setting */
+ uint32_t filter_mask = 0;
+
+ /* Create the data space */
+ if ((dataspace = H5Screate_simple(RANK, dims, maxdims)) < 0)
+ goto error;
+
+ /* Create a new file */
+ if ((file = H5Fcreate(FILE_NAME5, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ goto error;
+
+ /* Modify dataset creation properties, i.e. enable chunking and compression */
+ if ((cparms = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ goto error;
+
+ if ((status = H5Pset_chunk(cparms, RANK, chunk_dims)) < 0)
+ goto error;
+
+ if ((status = H5Pset_deflate(cparms, aggression)) < 0)
+ goto error;
+
+ /* Create a new dataset within the file using cparms creation properties */
+ if ((dset_id = H5Dcreate2(file, DATASETNAME, H5T_NATIVE_INT, dataspace, H5P_DEFAULT, cparms,
+ H5P_DEFAULT)) < 0)
+ goto error;
+
+ /* Initialize data for one chunk */
+ for (i = n = 0; i < CHUNK_NX; i++)
+ for (j = 0; j < CHUNK_NY; j++)
+ direct_buf[i][j] = n++;
+
+ /* Allocate output (compressed) buffer */
+ outbuf = malloc(z_dst_nbytes);
+ z_dst = (Bytef *)outbuf;
+
+ /* Perform compression from the source to the destination buffer */
+ ret = compress2(z_dst, &z_dst_nbytes, z_src, z_src_nbytes, aggression);
+
+ /* Check for various zlib errors */
+ if (Z_BUF_ERROR == ret) {
+ fprintf(stderr, "overflow");
+ goto error;
+ }
+ else if (Z_MEM_ERROR == ret) {
+ fprintf(stderr, "deflate memory error");
+ goto error;
+ }
+ else if (Z_OK != ret) {
+ fprintf(stderr, "other deflate error");
+ goto error;
+ }
+
+ /* Write the compressed chunk data repeatedly to cover all the
+ * chunks in the dataset, using the direct write function. */
+ for (i = 0; i < NX / CHUNK_NX; i++) {
+ for (j = 0; j < NY / CHUNK_NY; j++) {
+ status =
+ H5DOwrite_chunk(dset_id, H5P_DEFAULT, filter_mask, offset, z_dst_nbytes, outbuf);
+ offset[1] += CHUNK_NY;
+ }
+ offset[0] += CHUNK_NX;
+ offset[1] = 0;
+ }
+
+ /* Overwrite the first chunk with uncompressed data. Set the filter mask to
+ * indicate the compression filter is skipped. */
+ filter_mask = 0x00000001;
+ offset[0] = offset[1] = 0;
+ if (H5DOwrite_chunk(dset_id, H5P_DEFAULT, filter_mask, offset, buf_size, direct_buf) < 0)
+ goto error;
+
+ /* Read the entire dataset back, converting ints to longs, for data verification */
+ if (H5Dread(dset_id, H5T_NATIVE_LONG, H5S_ALL, H5S_ALL, H5P_DEFAULT, outbuf_long) < 0)
+ goto error;
+
+ /* Data verification here */
+ :
+ :
+
+ //! <!-- [H5DOwrite] -->
+
+ //! <!-- [H5DOread] -->
+
+#include <zlib.h>
+#include <math.h>
+#define DEFLATE_SIZE_ADJUST(s) (ceil(((double)(s)) * 1.001) + 12)
+ :
+ :
+ size_t buf_size = CHUNK_NX * CHUNK_NY * sizeof(int);
+ const Bytef *z_src = (const Bytef *)(direct_buf);
+ Bytef * z_dst; /* Destination buffer */
+ uLongf z_dst_nbytes = (uLongf)DEFLATE_SIZE_ADJUST(buf_size);
+ uLong z_src_nbytes = (uLong)buf_size;
+ int aggression = 9; /* Compression aggression setting */
+ uint32_t filter_mask = 0;
+ /* For H5DOread_chunk() */
+ void * readbuf = NULL; /* Buffer for reading data */
+ const Bytef *pt_readbuf; /* Pointer to the buffer for data read */
+ hsize_t read_chunk_nbytes; /* Size of chunk on disk */
+ uint32_t read_filter_mask = 0; /* Filter mask returned by H5DOread_chunk() */
+ int read_dst_buf[CHUNK_NX][CHUNK_NY]; /* Buffer to hold uncompressed data */
+
+ /* Create the data space */
+ if ((dataspace = H5Screate_simple(RANK, dims, maxdims)) < 0)
+ goto error;
+
+ /* Create a new file */
+ if ((file = H5Fcreate(FILE_NAME5, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ goto error;
+
+ /* Modify dataset creation properties, i.e. enable chunking and compression */
+ if ((cparms = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ goto error;
+
+ if ((status = H5Pset_chunk(cparms, RANK, chunk_dims)) < 0)
+ goto error;
+
+ if ((status = H5Pset_deflate(cparms, aggression)) < 0)
+ goto error;
+
+ /* Create a new dataset within the file using cparms creation properties */
+ if ((dset_id = H5Dcreate2(file, DATASETNAME, H5T_NATIVE_INT, dataspace, H5P_DEFAULT, cparms,
+ H5P_DEFAULT)) < 0)
+ goto error;
+
+ /* Initialize data for one chunk */
+ for (i = n = 0; i < CHUNK_NX; i++)
+ for (j = 0; j < CHUNK_NY; j++)
+ direct_buf[i][j] = n++;
+
+ /* Allocate output (compressed) buffer */
+ outbuf = malloc(z_dst_nbytes);
+ z_dst = (Bytef *)outbuf;
+
+ /* Perform compression from the source to the destination buffer */
+ ret = compress2(z_dst, &z_dst_nbytes, z_src, z_src_nbytes, aggression);
+
+ /* Check for various zlib errors */
+ if (Z_BUF_ERROR == ret) {
+ fprintf(stderr, "overflow");
+ goto error;
+ }
+ else if (Z_MEM_ERROR == ret) {
+ fprintf(stderr, "deflate memory error");
+ goto error;
+ }
+ else if (Z_OK != ret) {
+ fprintf(stderr, "other deflate error");
+ goto error;
+ }
+
+ /* Write the compressed chunk data repeatedly to cover all the
+ * chunks in the dataset, using the direct write function. */
+ for (i = 0; i < NX / CHUNK_NX; i++) {
+ for (j = 0; j < NY / CHUNK_NY; j++) {
+ status = H5DOwrite_chunk(dset_id, H5P_DEFAULT, filter_mask, offset, z_dst_nbytes, outbuf);
+ offset[1] += CHUNK_NY;
+ }
+ offset[0] += CHUNK_NX;
+ offset[1] = 0;
+ }
+
+ if (H5Fflush(dset_id, H5F_SCOPE_LOCAL) < 0)
+ goto error;
+
+ if (H5Dclose(dset_id) < 0)
+ goto error;
+
+ if ((dset_id = H5Dopen2(file, DATASETNAME, H5P_DEFAULT)) < 0)
+ goto error;
+
+ offset[0] = CHUNK_NX;
+ offset[1] = CHUNK_NY;
+
+ /* Get the size of the compressed chunk */
+ ret = H5Dget_chunk_storage_size(dset_id, offset, &read_chunk_nbytes);
+
+ readbuf = malloc((size_t)read_chunk_nbytes);
+ pt_readbuf = (const Bytef *)readbuf;
+
+ /* Use H5DOread_chunk() to read the chunk back */
+ if ((status = H5DOread_chunk(dset_id, H5P_DEFAULT, offset, &read_filter_mask, readbuf)) < 0)
+ goto error;
+
+ ret =
+ uncompress((Bytef *)read_dst_buf, (uLongf *)&buf_size, pt_readbuf, (uLong)read_chunk_nbytes);
+
+ /* Check for various zlib errors */
+ if (Z_BUF_ERROR == ret) {
+ fprintf(stderr, "error: not enough room in output buffer");
+ goto error;
+ }
+ else if (Z_MEM_ERROR == ret) {
+ fprintf(stderr, "error: not enough memory");
+ goto error;
+ }
+ else if (Z_OK != ret) {
+ fprintf(stderr, "error: corrupted input data");
+ goto error;
+ }
+
+ /* Data verification here */
+ :
+ :
+//! <!-- [H5DOread] -->
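
Both fragments above bail out through a `goto error` path that is elided along with the rest of the program. A minimal sketch of the kind of cleanup block such a program might use, reusing the identifiers from the fragments and assuming the ids were initialized to H5I_INVALID_HID (illustrative only, not part of the committed example):

    error:
        /* Free the compression buffer and close whatever was opened */
        if (outbuf)
            free(outbuf);
        if (dset_id >= 0)
            H5Dclose(dset_id);
        if (dataspace >= 0)
            H5Sclose(dataspace);
        if (cparms >= 0)
            H5Pclose(cparms);
        if (file >= 0)
            H5Fclose(file);
        return 1;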
diff --git a/doxygen/examples/H5LDget_dset_elmts.c b/doxygen/examples/H5LDget_dset_elmts.c
new file mode 100644
index 0000000..e23299a
--- /dev/null
+++ b/doxygen/examples/H5LDget_dset_elmts.c
@@ -0,0 +1,143 @@
+/* -*- c-file-style: "stroustrup" -*- */
+
+//! <!-- [first_declare] -->
+
+DATASET "DSET1" {DATATYPE H5T_STD_I32LE DATASPACE SIMPLE{(4, 13) / (60, 100)} : : }
+
+//! <!-- [first_declare] -->
+
+//! <!-- [first_reading] -->
+
+/* open the HDF5 file */
+fid = H5Fopen(FILE, H5F_ACC_RDWR, H5P_DEFAULT);
+
+/* open the dataset */
+did = H5Dopen2(fid, "DSET1", H5P_DEFAULT);
+ :
+ :
+ /* define hsize_t dims[2]; */
+ /* define hsize_t new_dims[2]; */
+ /* get the dataset's current dimension sizes */
+ H5LDget_dset_dims(did, dims);
+
+ /* extend the dataset by 2 */
+ new_dims[0] = dims[0] + 2;
+ new_dims[1] = dims[1] + 2;
+ H5Dset_extent(did, new_dims);
+
+ /* write data to the extended dataset */
+ :
+ :
+ /* get the size of the dataset's data type */
+ type_size = H5LDget_dset_type_size(did, NULL);
+ :
+ :
+ /* allocate buffer for storing selected data elements from the dataset */
+ /* calculate # of selected elements from dims & new_dims */
+ /* buffer size = type_size * number of selected elements */
+ :
+ :
+ /* read the selected elements from the dataset into buf */
+ H5LDget_dset_elmts(did, dims, new_dims, NULL, buf);
+ :
+ :
+ H5Dclose(did);
+ H5Fclose(fid);
+
+ //! <!-- [first_reading] -->
+
+ //! <!-- [first_output] -->
+
+ data for elements (0, 13), (0, 14)
+ data for elements (1, 13), (1, 14)
+ data for elements (2, 13), (2, 14)
+ data for elements (3, 13), (3, 14)
+ data for elements (4, 0), (4, 1), (4, 2)......................(4, 13), (4, 14)
+ data for elements (5, 0), (5, 1), (5, 2)......................(5, 13), (5, 14)
+
+//! <!-- [first_output] -->
+
+
+//! <!-- [second_declare] -->
+
+ DATASET "DSET2" {
+ DATATYPE H5T_COMPOUND {
+ H5T_STD_I32LE "a";
+ H5T_STD_I32LE "b";
+ H5T_ARRAY
+ {
+ [4] H5T_STD_I32LE
+ }
+ "c";
+ H5T_STD_I32LE "d";
+ H5T_STD_I32LE "e";
+ H5T_COMPOUND
+ {
+ H5T_STD_I32LE "a";
+ H5T_STD_I32LE "b";
+ H5T_ARRAY
+ {
+ [4] H5T_STD_I32LE
+ }
+ "c";
+ H5T_STD_I32LE "d";
+ H5T_STD_I32LE "e";
+ }
+ "s2";
+ }
+ DATASPACE SIMPLE
+ {
+ (5) / (5)
+ }
+ ::
+ }
+
+ //! <!-- [second_declare] -->
+
+ //! <!-- [second_reading] -->
+
+ /* open the HDF5 file */
+ fid = H5Fopen(FILE, H5F_ACC_RDWR, H5P_DEFAULT);
+
+ /* open the dataset */
+ did = H5Dopen2(fid, "DSET2", H5P_DEFAULT);
+
+ /* define hsize_t dims[1]; */
+ /* define hsize_t new_dims[1]; */
+ :
+ :
+ /* get the dataset's current dimension size */
+ H5LDget_dset_dims(did, dims);
+
+ /* extend the dataset by 2 */
+ new_dims[0] = dims[0] + 2;
+ H5Dset_extent(did, new_dims);
+ :
+ :
+ /* write data to the extended part of the dataset */
+ :
+ :
+ /* #define fields "d,s2.c" */
+ /* get the size of the dataset's data type for the selected fields */
+ type_size = H5LDget_dset_type_size(did, fields);
+ :
+ :
+ /* allocate buffer for storing selected data elements from the dataset */
+ /* calculate # of selected elements from dims & new_dims */
+ /* buffer size = type_size * number of selected elements */
+ :
+ :
+ /* read the selected elements from the dataset into buf */
+ H5LDget_dset_elmts(did, dims, new_dims, fields, buf);
+ :
+ :
+ H5Dclose(did);
+ H5Fclose(fid);
+
+ //! <!-- [second_reading] -->
+
+ //! <!-- [second_output] -->
+
+ Data for element (5): integer value for "d", 4 integer values for array "s2.c"
+ Data for element (6): integer value for "d", 4 integer values for array "s2.c"
+
+//! <!-- [second_output] -->
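
Both fragments leave the buffer allocation as a comment. For the two-dimensional DSET1 case, the elements returned by H5LDget_dset_elmts() are exactly those added by the extension, so the allocation could be sketched as below (variable names follow the fragment; `buf` is assumed to be a void pointer, and this is illustrative rather than part of the committed example):

    /* Elements visible after the extension minus those visible before it */
    hsize_t num_elmts = new_dims[0] * new_dims[1] - dims[0] * dims[1];

    /* buffer size = type_size * number of selected elements */
    buf = malloc((size_t)type_size * (size_t)num_elmts);
    if (buf == NULL) {
        fprintf(stderr, "out of memory\n");
        return -1;
    }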
diff --git a/doxygen/examples/H5LT_examples.c b/doxygen/examples/H5LT_examples.c
new file mode 100644
index 0000000..5ed4c29
--- /dev/null
+++ b/doxygen/examples/H5LT_examples.c
@@ -0,0 +1,27 @@
+/* -*- c-file-style: "stroustrup" -*- */
+
+//! <!-- [get_attribute_info] -->
+
+H5T_class_t type_class;
+size_t type_size;
+hsize_t dims[1];
+...
+status = H5LTget_attribute_info(file_id, "/", STRNAME, dims, &type_class, &type_size);
+if (type_class == H5T_STRING) {
+    printf("Attribute is a string.\n");
+    printf("String size: %zu\n", type_size);
+}
+
+ //! <!-- [get_attribute_info] -->
+
+ //! <!-- [enum] -->
+
+ H5T_ENUM {
+     H5T_NATIVE_INT;
+     "Bob"     0;
+     "Elena"   1;
+     "Quincey" 2;
+     "Frank"   3;
+ }
+
+//! <!-- [enum] -->
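
The get_attribute_info fragment assumes a string attribute named STRNAME already exists on the root group. With the Lite API it could have been written, and read back, as sketched below; the attribute value is made up for illustration and is not part of the committed example:

    /* Attach a string attribute to the root group */
    status = H5LTset_attribute_string(file_id, "/", STRNAME, "sample attribute value");

    /* Read it back into a buffer large enough for the stored string */
    char attr_data[64];
    status = H5LTget_attribute_string(file_id, "/", STRNAME, attr_data);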
diff --git a/doxygen/examples/H5TBAget_fill.c b/doxygen/examples/H5TBAget_fill.c
new file mode 100644
index 0000000..15eae8b
--- /dev/null
+++ b/doxygen/examples/H5TBAget_fill.c
@@ -0,0 +1,43 @@
+unsigned char tmp_fill_buf[40];
+...
+
+file_id = H5Fopen(FILENAME, H5F_ACC_RDWR, H5P_DEFAULT);
+dataset_id = H5Dopen(file_id, TABLE_NAME, H5P_DEFAULT);
+datatype_id = H5Dget_type(dataset_id);
+
+status = H5TBget_table_info(file_id, TABLE_NAME, &nfields, &nrecords);
+
+hasfill = H5TBAget_fill(file_id, TABLE_NAME, dataset_id, tmp_fill_buf);
+
+for (i = 0; i < nfields; i++) {
+ member_type_id = H5Tget_member_type(datatype_id, (unsigned)i);
+ native_mem_type_id = H5Tget_native_type(member_type_id, H5T_DIR_ASCEND);
+ member_offset = H5Tget_member_offset(datatype_id, (unsigned)i);
+ printf("member_offset: %i\n", member_offset);
+ memb_class = H5Tget_class(member_type_id);
+ switch (memb_class) {
+ case H5T_INTEGER:
+ /* convert unsigned char array to integer */
+ break;
+ case H5T_FLOAT:
+ /* convert unsigned char array to double or float */
+
+ if (H5Tequal(native_mem_type_id, H5T_NATIVE_DOUBLE)) {
+ }
+ else if (H5Tequal(native_mem_type_id, H5T_NATIVE_FLOAT)) {
+ f.i = tmp_fill_buf[member_offset] | (tmp_fill_buf[member_offset + 1] << 8) |
+ (tmp_fill_buf[member_offset + 2] << 16) | (tmp_fill_buf[member_offset + 3] << 24);
+ printf("Field %i Fill Value: %lf\n", i, f.f);
+ }
+ break;
+
+ case H5T_STRING:
+ /* convert unsigned char array to string */
+ strsize = H5Tget_size(member_type_id);
+
+ printf("Field %i Fill Value: ", i);
+ for (j = 0; j < strsize; j++)
+ printf("%c", tmp_fill_buf[member_offset + j]);
+ printf("\n");
+ break;
+ }
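
The H5T_INTEGER branch above carries only a comment. One way to recover an int fill value from tmp_fill_buf, sketched as a drop-in for that case label under the assumption that the buffer holds the value in native byte order (a fully general version would convert the raw bytes with H5Tconvert()):

    case H5T_INTEGER: {
        int fill_int;

        /* Copy the raw fill-value bytes for this member into a native int */
        memcpy(&fill_int, tmp_fill_buf + member_offset, sizeof(int));
        printf("Field %i Fill Value: %d\n", i, fill_int);
        break;
    }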