author     Fang Guo <fangguo@ncsa.uiuc.edu>   2005-07-26 22:20:44 (GMT)
committer  Fang Guo <fangguo@ncsa.uiuc.edu>   2005-07-26 22:20:44 (GMT)
commit     580fde182ae58adce9ba78e440d6838247b47101 (patch)
tree       91fb6b4249184a8ca7dca3981a607918cde4a49d
parent     62e6bc99b0cb9b85d2a3bb2da1318798ed4e0bb7 (diff)
[svn-r11159] Purpose:
Improvement

Description:
Source code for the high-level C library examples. They were obtained from
http://hdf.ncsa.uiuc.edu/HDF5/hdf5_hl/doc/tutor/examples/

Solution:

Platforms tested:

Misc. update:
-rwxr-xr-x  windows/hl/src/ex_images.c   66
-rwxr-xr-x  windows/hl/src/ex_lite.c     72
-rwxr-xr-x  windows/hl/src/ex_table.c   129
3 files changed, 267 insertions, 0 deletions
diff --git a/windows/hl/src/ex_images.c b/windows/hl/src/ex_images.c
new file mode 100755
index 0000000..f987796
--- /dev/null
+++ b/windows/hl/src/ex_images.c
@@ -0,0 +1,66 @@
+/****************************************************************************
+ * NCSA HDF *
+ * Scientific Data Technologies *
+ * National Center for Supercomputing Applications *
+ * University of Illinois at Urbana-Champaign *
+ * 605 E. Springfield, Champaign IL 61820 *
+ * *
+ * For conditions of distribution and use, see the accompanying *
+ * hdf/COPYING file. *
+ * *
+ ****************************************************************************/
+
+
+#include "H5IM.h"
+
+
+#define WIDTH (hsize_t)500
+#define HEIGHT (hsize_t)200
+unsigned char data [ WIDTH*HEIGHT ];
+
+
+int main( void )
+{
+ hid_t file_id;
+ herr_t status;
+ hsize_t pal_dims[] = {9,3};
+ hsize_t i;
+ /* create a 9 entry palette */
+ unsigned char pal[9*3] = {0, 0, 168,
+ 0, 0, 252,
+ 0, 168, 252,
+ 84, 252, 252,
+ 168, 252, 168,
+ 0, 252, 168,
+ 252, 252, 84,
+ 252, 168, 0,
+ 252, 0, 0};
+
+ EXAMPLE("make indexed image");
+
+
+ for (i = 0; i < WIDTH*HEIGHT; i++ )
+ data[i] = (unsigned char)i;
+
+ /* Create a new HDF5 file using default properties. */
+ file_id = H5Fcreate( "ex_image1.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
+
+ /* Write image */
+ status = H5IMmake_image_8bit( file_id, "Image1", WIDTH, HEIGHT, data );
+
+ /* Make a palette */
+ status = H5IMmake_palette( file_id, "Palette", pal_dims, pal );
+
+ /* Attach a palette to the image dataset */
+ status = H5IMlink_palette( file_id, "Image1", "Palette" );
+
+ /* Close the file. */
+ status = H5Fclose( file_id );
+
+ PASSED();
+
+ return 0;
+
+
+}
+
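
The EXAMPLE() and PASSED() macros used above are not defined in this file; they presumably come from a shared test header elsewhere in the Windows high-level project, and the program links against the HDF5 high-level library (hdf5_hl) in addition to the core hdf5 library. As a companion, here is a minimal read-back sketch (not part of the commit) that assumes the file and dataset names created by ex_images.c, uses only the documented H5IM query and read calls, and prints with plain printf instead of those macros.

/* Read-back sketch: reopen ex_image1.h5 and inspect/read "Image1" */
#include "H5IM.h"
#include <stdio.h>
#include <stdlib.h>

int main( void )
{
    hid_t          file_id;
    hsize_t        width, height, planes;
    hssize_t       npals;
    char           interlace[16];
    unsigned char *buf;

    /* Open the file written by the example above */
    file_id = H5Fopen( "ex_image1.h5", H5F_ACC_RDONLY, H5P_DEFAULT );
    if ( file_id < 0 )
        return 1;

    /* Query the image dimensions and the number of attached palettes */
    if ( H5IMget_image_info( file_id, "Image1", &width, &height, &planes,
                             interlace, &npals ) < 0 )
        return 1;

    /* Read the 8-bit image data back into memory */
    buf = (unsigned char *) malloc( (size_t)(width * height) );
    if ( buf == NULL || H5IMread_image( file_id, "Image1", buf ) < 0 )
        return 1;

    printf( "Image1: %lu x %lu, %ld palette(s)\n",
            (unsigned long)width, (unsigned long)height, (long)npals );

    free( buf );
    H5Fclose( file_id );
    return 0;
}
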
diff --git a/windows/hl/src/ex_lite.c b/windows/hl/src/ex_lite.c
new file mode 100755
index 0000000..a111998
--- /dev/null
+++ b/windows/hl/src/ex_lite.c
@@ -0,0 +1,72 @@
+/****************************************************************************
+ * NCSA HDF *
+ * Scientific Data Technologies *
+ * National Center for Supercomputing Applications *
+ * University of Illinois at Urbana-Champaign *
+ * 605 E. Springfield, Champaign IL 61820 *
+ * *
+ * For conditions of distribution and use, see the accompanying *
+ * hdf/COPYING file. *
+ * *
+ ****************************************************************************/
+
+
+#include "H5LT.h"
+
+#include <stdlib.h>
+
+
+
+int main( void )
+{
+
+
+ hid_t file_id;
+ hid_t dataset_id;
+ hid_t space_id;
+ hsize_t dims[1] = { 5 };
+ int data[5] = {1,2,3,4,5};
+ herr_t status;
+
+ EXAMPLE("make an attribute");
+
+ /* Create a file */
+ file_id = H5Fcreate ("ex_lite3.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Create a data space */
+ space_id = H5Screate_simple(1,dims,NULL);
+
+ /* Create a dataset "dset" */
+ dataset_id = H5Dcreate(file_id,"dset",H5T_NATIVE_INT,space_id,H5P_DEFAULT);
+
+ /* Close */
+ status = H5Dclose(dataset_id);
+ status = H5Sclose(space_id);
+
+/*-------------------------------------------------------------------------
+ * Example of H5LTset_attribute_int
+ *-------------------------------------------------------------------------
+ */
+
+ /* Create and write the attribute "attr1" on the dataset "dset" */
+ status = H5LTset_attribute_int(file_id,"dset","attr1",data,5);
+
+/*-------------------------------------------------------------------------
+ * Example of H5LTget_attribute_int
+ *-------------------------------------------------------------------------
+ */
+
+ /* Get the attribute "attr1" from the dataset "dset" */
+ status = H5LTget_attribute_int(file_id,"dset","attr1",data);
+
+
+ /* Close file */
+ status = H5Fclose(file_id);
+
+ PASSED();
+
+ return 0;
+
+}
+
+
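
For comparison, here is a minimal sketch (not part of the commit) of the same round trip written entirely with H5LT convenience calls: H5LTmake_dataset_int creates and writes "dset" in one step, replacing the H5Screate_simple / H5Dcreate / H5Dclose / H5Sclose sequence used above (the five-argument H5Dcreate there is the HDF5 1.6-era signature).

/* H5LT-only sketch: create the dataset and attribute, then read the attribute back */
#include "H5LT.h"
#include <stdio.h>

int main( void )
{
    hid_t   file_id;
    hsize_t dims[1] = { 5 };
    int     data[5] = { 1, 2, 3, 4, 5 };
    int     attr[5];
    herr_t  status;

    file_id = H5Fcreate( "ex_lite3.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
    if ( file_id < 0 )
        return 1;

    /* Create and write the dataset "dset" in a single call */
    status = H5LTmake_dataset_int( file_id, "dset", 1, dims, data );

    /* Attach the attribute "attr1" and read it back, as in the example above */
    status = H5LTset_attribute_int( file_id, "dset", "attr1", data, 5 );
    status = H5LTget_attribute_int( file_id, "dset", "attr1", attr );

    if ( status >= 0 )
        printf( "attr1[0] = %d\n", attr[0] );

    return H5Fclose( file_id ) < 0 ? 1 : 0;
}
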
diff --git a/windows/hl/src/ex_table.c b/windows/hl/src/ex_table.c
new file mode 100755
index 0000000..ae3f2fc
--- /dev/null
+++ b/windows/hl/src/ex_table.c
@@ -0,0 +1,129 @@
+/****************************************************************************
+ * NCSA HDF *
+ * Scientific Data Technologies *
+ * National Center for Supercomputing Applications *
+ * University of Illinois at Urbana-Champaign *
+ * 605 E. Springfield, Champaign IL 61820 *
+ * *
+ * For conditions of distribution and use, see the accompanying *
+ * hdf/COPYING file. *
+ * *
+ ****************************************************************************/
+
+
+#include "H5TB.h"
+
+#include <stdlib.h>
+
+
+/*-------------------------------------------------------------------------
+ * Table API example
+ *
+ * H5TBmake_table
+ * H5TBread_table
+ *
+ *-------------------------------------------------------------------------
+ */
+
+#define NFIELDS 5
+#define NRECORDS 8
+
+int main( void )
+{
+
+ typedef struct Particle
+ {
+ char name[16];
+ int lati;
+ int longi;
+ float pressure;
+ double temperature;
+ } Particle;
+
+ Particle dst_buf[NRECORDS];
+
+ /* Calculate the size and the offsets of our struct members in memory */
+ size_t dst_size = sizeof( Particle );
+ size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
+ HOFFSET( Particle, lati ),
+ HOFFSET( Particle, longi ),
+ HOFFSET( Particle, pressure ),
+ HOFFSET( Particle, temperature )};
+
+ size_t dst_sizes[NFIELDS] = { sizeof( dst_buf[0].name),
+ sizeof( dst_buf[0].lati),
+ sizeof( dst_buf[0].longi),
+ sizeof( dst_buf[0].pressure),
+ sizeof( dst_buf[0].temperature)};
+
+
+ /* Define an array of Particles */
+ Particle p_data[NRECORDS] = { {"zero",0,0, 0.0f, 0.0},
+ {"one",10,10, 1.0f, 10.0},
+ {"two", 20,20, 2.0f, 20.0},
+ {"three",30,30, 3.0f, 30.0},
+ {"four", 40,40, 4.0f, 40.0},
+ {"five", 50,50, 5.0f, 50.0},
+ {"six", 60,60, 6.0f, 60.0},
+ {"seven",70,70, 7.0f, 70.0}
+ };
+
+ /* Define field information */
+ const char *field_names[NFIELDS] =
+ { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
+ hid_t field_type[NFIELDS];
+ hid_t string_type;
+ hid_t file_id;
+ hsize_t chunk_size = 10;
+ int *fill_data = NULL;
+ int compress = 0;
+ herr_t status;
+
+ EXAMPLE("make a table");
+
+
+ /* Initialize the field types */
+ string_type = H5Tcopy( H5T_C_S1 );
+ H5Tset_size( string_type, 16 );
+ field_type[0] = string_type;
+ field_type[1] = H5T_NATIVE_INT;
+ field_type[2] = H5T_NATIVE_INT;
+ field_type[3] = H5T_NATIVE_FLOAT;
+ field_type[4] = H5T_NATIVE_DOUBLE;
+
+ /* Create a new file using default properties. */
+ file_id = H5Fcreate( "ex_table_01.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
+
+
+/*-------------------------------------------------------------------------
+ * H5TBmake_table
+ *-------------------------------------------------------------------------
+ */
+
+ status=H5TBmake_table( "Table Title", file_id, "Table1",(hsize_t) NFIELDS, (hsize_t)NRECORDS, dst_size,
+ field_names, dst_offset, field_type,
+ chunk_size, fill_data, compress, p_data );
+
+
+/*-------------------------------------------------------------------------
+ * H5TBread_table
+ *-------------------------------------------------------------------------
+ */
+
+ status=H5TBread_table( file_id, "Table1", dst_size, dst_offset, dst_sizes, dst_buf );
+
+/*-------------------------------------------------------------------------
+ * end
+ *-------------------------------------------------------------------------
+ */
+
+ /* Close the file. */
+ H5Fclose( file_id );
+
+ PASSED();
+
+ return 0;
+
+}
+
+
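
Finally, a minimal read-back sketch (not part of the commit) that reopens ex_table_01.h5, queries the table with H5TBget_table_info, and reads a slice of records with H5TBread_records; it assumes the same Particle layout and table name as the example above.

/* Read-back sketch: query "Table1" and read three records starting at record 2 */
#include "H5TB.h"
#include <stdio.h>

typedef struct Particle
{
    char   name[16];
    int    lati;
    int    longi;
    float  pressure;
    double temperature;
} Particle;

int main( void )
{
    Particle buf[3];
    size_t   dst_size = sizeof( Particle );
    size_t   dst_offset[5] = { HOFFSET( Particle, name ),
                               HOFFSET( Particle, lati ),
                               HOFFSET( Particle, longi ),
                               HOFFSET( Particle, pressure ),
                               HOFFSET( Particle, temperature ) };
    size_t   dst_sizes[5]  = { sizeof( buf[0].name ),
                               sizeof( buf[0].lati ),
                               sizeof( buf[0].longi ),
                               sizeof( buf[0].pressure ),
                               sizeof( buf[0].temperature ) };
    hsize_t  nfields, nrecords;
    hid_t    file_id;
    int      i;

    file_id = H5Fopen( "ex_table_01.h5", H5F_ACC_RDONLY, H5P_DEFAULT );
    if ( file_id < 0 )
        return 1;

    /* How many fields and records does the table hold? */
    if ( H5TBget_table_info( file_id, "Table1", &nfields, &nrecords ) < 0 )
        return 1;
    printf( "Table1: %d fields, %d records\n", (int)nfields, (int)nrecords );

    /* Read three records starting at record index 2 */
    if ( H5TBread_records( file_id, "Table1", 2, 3, dst_size,
                           dst_offset, dst_sizes, buf ) < 0 )
        return 1;

    for ( i = 0; i < 3; i++ )
        printf( "%s  lat=%d  lon=%d\n", buf[i].name, buf[i].lati, buf[i].longi );

    H5Fclose( file_id );
    return 0;
}
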