author | Raymond Lu <songyulu@hdfgroup.org> | 2011-01-21 20:36:14 (GMT)
committer | Raymond Lu <songyulu@hdfgroup.org> | 2011-01-21 20:36:14 (GMT)
commit | 1a212e80ce6090100a39d7cd872e6a17533a00f4 (patch)
tree | d026c1a64949b33b4523c8e2c26db9fccb443cac /test
parent | 1b6c627c30c324f7ca837a4be00b97e9837383f7 (diff)
download | hdf5-1a212e80ce6090100a39d7cd872e6a17533a00f4.zip hdf5-1a212e80ce6090100a39d7cd872e6a17533a00f4.tar.gz hdf5-1a212e80ce6090100a39d7cd872e6a17533a00f4.tar.bz2
[svn-r19977] Bug 2131 - I added a test case for integer data. I also skipped the test case for double data because it still fails.
Tested on jam and linew.
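For context, the heart of the new gen_cross.c code below is writing an integer dataset through the scale-offset filter so that cross_read.c can later read it back on a machine with a different byte order and compare values element by element. Here is a minimal, self-contained sketch of that pattern (the output file name, dataset name, and chunk sizes are illustrative choices, not the ones the test generator uses):

```c
#include "hdf5.h"

#define NX 6
#define NY 6

int
main(void)
{
    hid_t   file, space, dcpl, dset;
    hsize_t dims[2]  = {NX, NY};
    hsize_t chunk[2] = {3, 3};
    int     data[NX][NY];
    int     i, j;

    /* Fill the buffer with the same i + j pattern the test uses */
    for (j = 0; j < NX; j++)
        for (i = 0; i < NY; i++)
            data[j][i] = i + j;

    /* Illustrative file name; the real generator writes data.h5 */
    file  = H5Fcreate("scaleoffset_int_example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    space = H5Screate_simple(2, dims, NULL);

    /* The scale-offset filter requires a chunked dataset layout */
    dcpl = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl, 2, chunk);
    H5Pset_scaleoffset(dcpl, H5Z_SO_INT, H5Z_SO_INT_MINBITS_DEFAULT);

    /* Create and write the integer dataset with the filter applied */
    dset = H5Dcreate2(file, "Scale_offset_int_example", H5T_NATIVE_INT, space,
                      H5P_DEFAULT, dcpl, H5P_DEFAULT);
    H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);

    /* Close/release resources */
    H5Dclose(dset);
    H5Pclose(dcpl);
    H5Sclose(space);
    H5Fclose(file);

    return 0;
}
```

The reading side (cross_read.c) opens the dataset, converts to the native integer type with H5Tget_native_type(), reads it with H5Dread(), and flags any element that does not match the expected i + j value.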
Diffstat (limited to 'test')
-rw-r--r-- | test/be_data.h5 | bin | 6808 -> 9424 bytes
-rwxr-xr-x | test/cross_read.c | 107
-rwxr-xr-x | test/gen_cross.c | 117
-rw-r--r-- | test/le_data.h5 | bin | 6808 -> 9424 bytes
-rw-r--r-- | test/vms_data.h5 | bin | 6808 -> 9424 bytes
5 files changed, 195 insertions, 29 deletions
```diff
diff --git a/test/be_data.h5 b/test/be_data.h5
index 1b02828..8dfa38c 100644
--- a/test/be_data.h5
+++ b/test/be_data.h5
Binary files differ
diff --git a/test/cross_read.c b/test/cross_read.c
index 6d6f26c..e6c6f3c 100755
--- a/test/cross_read.c
+++ b/test/cross_read.c
@@ -17,8 +17,8 @@
  * Programmer:  Raymond Lu <slu@ncsa.uiuc.edu>
  *              Thursday, March 23, 2006
  *
- * Purpose:     Check if floating-point data created on OpenVMS (VAX type), Solaris,
- *              and Linux machines can be read on the machine running this test.
+ * Purpose:     Check if floating-point data created on OpenVMS, big-endian, and
+ *              little-endian machines can be read on the machine running this test.
  */
 
 #include "h5test.h"
@@ -31,12 +31,28 @@ const char *FILENAME[] = {
     NULL
 };
 
-#define DATASETNAME "Array"
-#define DATASETNAME2 "Scale_offset_data"
+#define DATASETNAME "Array"
+#define DATASETNAME2 "Scale_offset_double_data"
+#define DATASETNAME3 "Scale_offset_int_data"
 #define NX 6
 #define NY 6
-#define RANK 2
+
+/*-------------------------------------------------------------------------
+ * Function:    read_data
+ *
+ * Purpose:     Read data from a data file.
+ *
+ * Return:      Success:        0
+ *              Failure:        -1
+ *
+ * Programmer:  Raymond Lu
+ *              21 January 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
 
 static int read_data(char *fname)
 {
     const char *pathname = H5_get_srcdir_filename(fname); /* Corrected test file name */
@@ -45,10 +61,12 @@ static int read_data(char *fname)
     hid_t       dt;
     double      data_in[NX][NY];     /* input buffer */
     double      data_out[NX][NY];    /* output buffer */
+    int         int_data_in[NX][NY];     /* input buffer */
+    int         int_data_out[NX][NY];    /* output buffer */
     int         i, j;
     unsigned    nerrors = 0;
     const char  *not_supported= " Scaleoffset filter is not enabled.";
-    /*const char *not_fixed= " Scaleoffset filter bug (2131) is not fixed yet.";*/
+    const char  *not_fixed= " Scaleoffset filter bug (2131) is not fixed yet.";
 
     /*
      * Open the file.
@@ -127,8 +145,8 @@ static int read_data(char *fname)
 
     PASSED();
 
-    TESTING(" dataset with scale-offset filter");
-
+    TESTING(" dataset of DOUBLE with scale-offset filter");
+#ifdef TMP
 #ifdef H5_HAVE_FILTER_SCALEOFFSET
     /*
      * Open the dataset with scale-offset filter.
@@ -165,7 +183,6 @@ static int read_data(char *fname)
     /* Check results */
     for (j=0; j<NX; j++) {
         for (i=0; i<NY; i++) {
-            /* if (data_out[j][i] != data_in[j][i]) { */
             if (!DBL_REL_EQUAL(data_out[j][i], data_in[j][i], 0.001)) {
                 if (!nerrors++) {
                     H5_FAILED();
@@ -194,6 +211,78 @@ static int read_data(char *fname)
     SKIPPED();
     puts(not_supported);
 #endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+#else /*TMP*/
+    SKIPPED();
+    puts(not_fixed);
+#endif /*TMP*/
+
+    TESTING(" dataset of INT with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+    /*
+     * Open the dataset with scale-offset filter.
+     */
+    if((dataset = H5Dopen2(file, DATASETNAME3, H5P_DEFAULT)) < 0)
+        TEST_ERROR;
+
+    /*
+     * Data and output buffer initialization.
+     */
+    for (j = 0; j < NX; j++) {
+        for (i = 0; i < NY; i++) {
+            int_data_in[j][i] = i + j;
+            int_data_out[j][i] = 0;
+        }
+    }
+
+    /*
+     * Get datatype and dataspace handles and then query
+     * dataset class, order, size, rank and dimensions.
+     */
+    if((dt = H5Dget_type(dataset)) < 0)     /* datatype handle */
+        TEST_ERROR;
+    if((datatype = H5Tget_native_type(dt, H5T_DIR_DEFAULT)) < 0)
+        TEST_ERROR;
+
+    /*
+     * Read data from hyperslab in the file into the hyperslab in
+     * memory and display.
+     */
+    if(H5Dread(dataset, datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, int_data_out) < 0)
+        TEST_ERROR;
+
+    /* Check results */
+    for (j=0; j<NX; j++) {
+        for (i=0; i<NY; i++) {
+            if (int_data_out[j][i] != int_data_in[j][i]) {
+                if (!nerrors++) {
+                    H5_FAILED();
+                    printf("element [%d][%d] is %d but should have been %d\n",
+                           j, i, int_data_out[j][i], int_data_in[j][i]);
+                }
+            }
+        }
+    }
+
+    /*
+     * Close/release resources.
+     */
+    H5Tclose(dt);
+    H5Tclose(datatype);
+    H5Dclose(dataset);
+
+    /* Failure */
+    if (nerrors) {
+        printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+        return 1;
+    }
+
+    PASSED();
+
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+    SKIPPED();
+    puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
 
     H5Fclose(file);
     return 0;
diff --git a/test/gen_cross.c b/test/gen_cross.c
index 2505dbb..0b18a41 100755
--- a/test/gen_cross.c
+++ b/test/gen_cross.c
@@ -28,15 +28,17 @@
 
 #define H5FILE_NAME "data.h5"
 #define DATASETNAME "Array"
-#define DATASETNAME2 "Scale_offset_data"
+#define DATASETNAME2 "Scale_offset_double_data"
+#define DATASETNAME3 "Scale_offset_int_data"
 #define NX 6
 #define NY 6
 #define RANK 2
 #define CHUNK0 3
 #define CHUNK1 3
 
-int create_normal_dset(hid_t fid, hid_t sid, hid_t tid);
-int create_scale_offset_dset(hid_t fid, hid_t sid, hid_t tid);
+int create_normal_dset(hid_t fid, hid_t sid);
+int create_scale_offset_dset_double(hid_t fid, hid_t sid);
+int create_scale_offset_dset_int(hid_t fid, hid_t sid);
 
 
 /*-------------------------------------------------------------------------
@@ -55,7 +57,7 @@ int create_scale_offset_dset(hid_t fid, hid_t sid, hid_t tid);
  *-------------------------------------------------------------------------
  */
 int
-create_normal_dset(hid_t fid, hid_t sid, hid_t tid)
+create_normal_dset(hid_t fid, hid_t sid)
 {
     hid_t       dataset;         /* file and dataset handles */
     herr_t      status;
@@ -82,7 +84,7 @@ create_normal_dset(hid_t fid, hid_t sid, hid_t tid)
      * Create a new dataset within the file using defined dataspace and
      * datatype and default dataset creation properties.
      */
-    dataset = H5Dcreate2(fid, DATASETNAME, tid, sid,
+    dataset = H5Dcreate2(fid, DATASETNAME, H5T_NATIVE_DOUBLE, sid,
                          H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
 
     /*
@@ -101,7 +103,7 @@ create_normal_dset(hid_t fid, hid_t sid, hid_t tid)
 
 
 /*-------------------------------------------------------------------------
- * Function:    create_scale_offset_dset
+ * Function:    create_scale_offset_dset_double
  *
  * Purpose:     Create a dataset of DOUBLE datatype with scale-offset filter
  *
@@ -116,10 +118,10 @@ create_normal_dset(hid_t fid, hid_t sid, hid_t tid)
  *-------------------------------------------------------------------------
  */
 int
-create_scale_offset_dset(hid_t fid, hid_t sid, hid_t tid)
+create_scale_offset_dset_double(hid_t fid, hid_t sid)
 {
 #ifdef H5_HAVE_FILTER_SCALEOFFSET
-    hid_t       dataset;         /* file and dataset handles */
+    hid_t       dataset;         /* dataset handles */
     hid_t       dcpl;
     herr_t      status;
     float       data[NX][NY];    /* data to write */
@@ -146,7 +148,7 @@ create_scale_offset_dset(hid_t fid, hid_t sid, hid_t tid)
      * Create a new dataset within the file using defined dataspace and
      * datatype and default dataset creation properties.
      */
-    dataset = H5Dcreate2(fid, DATASETNAME2, tid, sid,
+    dataset = H5Dcreate2(fid, DATASETNAME2, H5T_NATIVE_DOUBLE, sid,
                          H5P_DEFAULT, dcpl, H5P_DEFAULT);
 
     /*
@@ -172,6 +174,85 @@ create_scale_offset_dset(hid_t fid, hid_t sid, hid_t tid)
 
 
 /*-------------------------------------------------------------------------
+ * Function:    create_scale_offset_dset_int
+ *
+ * Purpose:     Create a dataset of INT datatype with scale-offset filter
+ *
+ * Return:      Success:        0
+ *              Failure:        -1
+ *
+ * Programmer:  Raymond Lu
+ *              21 January 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+create_scale_offset_dset_int(hid_t fid, hid_t sid)
+{
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+    hid_t       dataset;         /* dataset handles */
+    hid_t       dcpl;
+    herr_t      status;
+    int         data[NX][NY];    /* data to write */
+    hsize_t     chunk[RANK] = {CHUNK0, CHUNK1};
+    int         i, j;
+
+    /*
+     * Data and output buffer initialization.
+     */
+    for (j = 0; j < NX; j++) {
+        for (i = 0; i < NY; i++)
+            data[j][i] = i + j;
+    }
+    /*
+     * 0 1 2 3 4 5
+     * 1 2 3 4 5 6
+     * 2 3 4 5 6 7
+     * 3 4 5 6 7 8
+     * 4 5 6 7 8 9
+     * 5 6 7 8 9 10
+     */
+
+    /*
+     * Create the dataset creation property list, add the Scale-Offset
+     * filter and set the chunk size.
+     */
+    dcpl = H5Pcreate (H5P_DATASET_CREATE);
+    status = H5Pset_scaleoffset (dcpl, H5Z_SO_INT, H5Z_SO_INT_MINBITS_DEFAULT);
+    status = H5Pset_chunk (dcpl, RANK, chunk);
+
+    /*
+     * Create a new dataset within the file using defined dataspace and
+     * datatype and default dataset creation properties.
+     */
+    dataset = H5Dcreate2(fid, DATASETNAME3, H5T_NATIVE_INT, sid,
+                         H5P_DEFAULT, dcpl, H5P_DEFAULT);
+
+    /*
+     * Write the data to the dataset using default transfer properties.
+     */
+    status = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
+                      H5P_DEFAULT, data);
+
+    /*
+     * Close/release resources.
+     */
+    H5Pclose(dcpl);
+    H5Dclose(dataset);
+
+#else /* H5_HAVE_FILTER_SCALEOFFSET */
+    const char          *not_supported= "Scaleoffset filter is not enabled. Can't create the dataset.";
+
+    puts(not_supported);
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+
+    return 0;
+}
+
+
+/*-------------------------------------------------------------------------
  * Function:    main
  *
  * Purpose:     Create a file for cross_read.c test.
@@ -188,7 +269,7 @@ int
 main (void)
 {
     hid_t       file;                 /* file and dataset handles */
-    hid_t       dataspace, datatype;
+    hid_t       dataspace;
     hsize_t     dimsf[RANK];
 
     /*
@@ -206,23 +287,19 @@ main (void)
     dimsf[1] = NY;
     dataspace = H5Screate_simple(RANK, dimsf, NULL);
 
-    /*
-     * Define datatype for the data in the file.
-     * We will store little endian INT numbers.
-     */
-    datatype = H5Tcopy(H5T_NATIVE_DOUBLE);
-
     /* Create a regular dataset */
-    create_normal_dset(file, dataspace, datatype);
+    create_normal_dset(file, dataspace);
+
+    /* Create a dataset of DOUBLE with scale-offset filter */
+    create_scale_offset_dset_double(file, dataspace);
 
-    /* Create a dataset with scale-offset filter */
-    create_scale_offset_dset(file, dataspace, datatype);
+    /* Create a dataset of INT with scale-offset filter */
+    create_scale_offset_dset_int(file, dataspace);
 
     /*
      * Close/release resources.
     */
     H5Sclose(dataspace);
-    H5Tclose(datatype);
     H5Fclose(file);
 
     return 0;
diff --git a/test/le_data.h5 b/test/le_data.h5
index 5276989..a3291a4 100644
--- a/test/le_data.h5
+++ b/test/le_data.h5
Binary files differ
diff --git a/test/vms_data.h5 b/test/vms_data.h5
index f3c4be5..b11d834 100644
--- a/test/vms_data.h5
+++ b/test/vms_data.h5
Binary files differ
```