-rw-r--r--  test/be_data.h5    | Bin 40320 -> 72368 bytes
-rwxr-xr-x  test/cross_read.c  | 932
-rwxr-xr-x  test/gen_cross.c   | 608
-rw-r--r--  test/le_data.h5    | Bin 40320 -> 72368 bytes
-rw-r--r--  test/vms_data.h5   | Bin 40320 -> 0 bytes
5 files changed, 799 insertions(+), 741 deletions(-)
diff --git a/test/be_data.h5 b/test/be_data.h5
index 0feefa3..e6629d3 100644
--- a/test/be_data.h5
+++ b/test/be_data.h5
Binary files differ
diff --git a/test/cross_read.c b/test/cross_read.c
index 6588031..308a0c1 100755
--- a/test/cross_read.c
+++ b/test/cross_read.c
@@ -17,7 +17,7 @@
* Programmer: Raymond Lu <slu@ncsa.uiuc.edu>
* Thursday, March 23, 2006
*
- * Purpose: Check if floating-point data created on OpenVMS, big-endian, and
+ * Purpose: Check if floating-point data created on big-endian and
* little-endian machines can be read on the machine running this test.
*/
@@ -31,7 +31,8 @@ const char *FILENAME[] = {
NULL
};
-#define DATASETNAME "Array"
+#define DATASETNAME "Array_le"
+#define DATASETNAME1 "Array_be"
#define DATASETNAME2 "Scale_offset_float_data_le"
#define DATASETNAME3 "Scale_offset_float_data_be"
#define DATASETNAME4 "Scale_offset_double_data_le"
@@ -44,94 +45,112 @@ const char *FILENAME[] = {
#define DATASETNAME11 "Scale_offset_int_data_be"
#define DATASETNAME12 "Scale_offset_long_long_data_le"
#define DATASETNAME13 "Scale_offset_long_long_data_be"
+
+#define DATASETNAME14 "Fletcher_float_data_le"
+#define DATASETNAME15 "Fletcher_float_data_be"
+#define DATASETNAME16 "Deflate_float_data_le"
+#define DATASETNAME17 "Deflate_float_data_be"
+#define DATASETNAME18 "Szip_float_data_le"
+#define DATASETNAME19 "Szip_float_data_be"
+#define DATASETNAME20 "Shuffle_float_data_le"
+#define DATASETNAME21 "Shuffle_float_data_be"
+#define DATASETNAME22 "Nbit_float_data_le"
+#define DATASETNAME23 "Nbit_float_data_be"
+
#define NX 6
#define NY 6
/*-------------------------------------------------------------------------
- * Function: read_data
+ * Function: check_data
*
- * Purpose: Read data from a data file.
+ * Purpose: Read and compare the data from a dataset.
*
* Return: Success: 0
- * Failure: -1
+ * Failure: 1
*
* Programmer: Raymond Lu
- * 21 January 2011
+ * 17 May 2011
*
* Modifications:
*
*-------------------------------------------------------------------------
*/
-static int read_data(char *fname)
+static int check_data(const char *dsetname, hid_t fid, hbool_t floating_number)
{
- const char *pathname = H5_get_srcdir_filename(fname); /* Corrected test file name */
- hid_t file, dataset; /* handles */
+ hid_t dataset; /* handles */
double data_in[NX+1][NY]; /* input buffer */
double data_out[NX+1][NY]; /* output buffer */
long long int_data_in[NX+1][NY]; /* input buffer */
long long int_data_out[NX+1][NY]; /* output buffer */
int i, j;
unsigned nerrors = 0;
- const char *not_supported= " Scaleoffset filter is not enabled.";
-
- /*
- * Open the file.
- */
- if((file = H5Fopen(pathname, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
- TEST_ERROR;
-
- TESTING("regular dataset");
/*
* Open the regular dataset.
*/
- if((dataset = H5Dopen2(file, DATASETNAME, H5P_DEFAULT)) < 0)
+ if((dataset = H5Dopen2(fid, dsetname, H5P_DEFAULT)) < 0)
TEST_ERROR;
/*
* Data and output buffer initialization.
*/
for (j = 0; j < NX; j++) {
- for (i = 0; i < NY; i++) {
- data_in[j][i] = i + j;
- data_out[j][i] = 0;
+ for (i = 0; i < NY; i++) {
+ data_in[j][i] = ((double)(i + j + 1))/3;
+ data_out[j][i] = 0;
+
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
}
}
for (i = 0; i < NY; i++) {
data_in[NX][i] = -2.2;
data_out[NX][i] = 0;
+
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
}
- /*
- * 0 1 2 3 4 5
- * 1 2 3 4 5 6
- * 2 3 4 5 6 7
- * 3 4 5 6 7 8
- * 4 5 6 7 8 9
- * 5 6 7 8 9 10
- * -2.2 -2.2 -2.2 -2.2 -2.2 -2.2
- */
/*
* Read data from hyperslab in the file into the hyperslab in
* memory and display.
*/
- if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- data_out) < 0)
- TEST_ERROR;
-
- /* Check results */
- for (j=0; j<(NX+1); j++) {
- for (i=0; i<NY; i++) {
- /* if (data_out[j][i] != data_in[j][i]) { */
- if (!FLT_ABS_EQUAL(data_out[j][i], data_in[j][i])) {
- if (!nerrors++) {
- H5_FAILED();
- printf("element [%d][%d] is %g but should have been %g\n",
- j, i, data_out[j][i], data_in[j][i]);
- }
- }
- }
+ if(floating_number) {
+ if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
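+        /* Compare with a relative tolerance: the file data was written from
+         * float buffers and some filters (scale-offset, nbit) are lossy, so
+         * the values read back are not bit-identical to these doubles. */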
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (!DBL_REL_EQUAL(data_out[j][i], data_in[j][i], 0.001)) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %g but should have been %g\n",
+ j, i, data_out[j][i], data_in[j][i]);
+ }
+ }
+ }
+ }
+ } else {
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
+ }
+ }
+ }
+ }
}
/*
@@ -147,766 +166,225 @@ static int read_data(char *fname)
}
PASSED();
+ return 0;
- TESTING("dataset of LE FLOAT with scale-offset filter");
-
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
- /*
- * Open the dataset with scale-offset filter.
- */
- if((dataset = H5Dopen2(file, DATASETNAME2, H5P_DEFAULT)) < 0)
- TEST_ERROR;
+error:
+ H5E_BEGIN_TRY {
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+ return 1;
+}
- /*
- * Data and output buffer initialization.
- */
- for (j = 0; j < NX; j++) {
- for (i = 0; i < NY; i++) {
- data_in[j][i] = ((double)(i + j + 1))/3;
- data_out[j][i] = 0;
- }
- }
- for (i = 0; i < NY; i++) {
- data_in[NX][i] = -2.2;
- data_out[NX][i] = 0;
- }
+
+/*-------------------------------------------------------------------------
+ * Function: open_dataset
+ *
+ * Purpose: Handle each dataset from the data file.
+ *
+ * Return: Success: 0
+ * Failure: Number of failures
+ *
+ * Programmer: Raymond Lu
+ * 21 January 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static int open_dataset(char *fname)
+{
+ const char *pathname = H5_get_srcdir_filename(fname); /* Corrected test file name */
+ hid_t file; /* handles */
+ unsigned nerrors = 0;
+ const char *not_supported= " filter is not enabled.";
/*
- * Read data from hyperslab in the file into the hyperslab in
- * memory and display.
+ * Open the file.
*/
- if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- data_out) < 0)
+ if((file = H5Fopen(pathname, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
TEST_ERROR;
- /* Check results */
- for (j=0; j<(NX+1); j++) {
- for (i=0; i<NY; i++) {
- if (!DBL_REL_EQUAL(data_out[j][i], data_in[j][i], 0.001)) {
- if (!nerrors++) {
- H5_FAILED();
- printf("element [%d][%d] is %g but should have been %g\n",
- j, i, data_out[j][i], data_in[j][i]);
- }
- }
- }
- }
-
- /*
- * Close/release resources.
- */
- if(H5Dclose(dataset) < 0)
- TEST_ERROR
+ TESTING("regular dataset of LE DOUBLE");
+ nerrors += check_data(DATASETNAME, file, TRUE);
- /* Failure */
- if (nerrors) {
- printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
- return 1;
- }
+ TESTING("regular dataset of BE DOUBLE");
+ nerrors += check_data(DATASETNAME1, file, TRUE);
- PASSED();
+ TESTING("dataset of LE FLOAT with scale-offset filter");
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ nerrors += check_data(DATASETNAME2, file, TRUE);
#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
-
+
TESTING("dataset of BE FLOAT with scale-offset filter");
-
#ifdef H5_HAVE_FILTER_SCALEOFFSET
- /*
- * Open the dataset with scale-offset filter.
- */
- if((dataset = H5Dopen2(file, DATASETNAME3, H5P_DEFAULT)) < 0)
- TEST_ERROR;
-
- /*
- * Data and output buffer initialization.
- */
- for (j = 0; j < NX; j++) {
- for (i = 0; i < NY; i++) {
- data_in[j][i] = ((double)(i + j + 1))/3;
- data_out[j][i] = 0;
- }
- }
- for (i = 0; i < NY; i++) {
- data_in[NX][i] = -2.2;
- data_out[NX][i] = 0;
- }
-
- /*
- * Read data from hyperslab in the file into the hyperslab in
- * memory and display.
- */
- if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- data_out) < 0)
- TEST_ERROR;
-
- /* Check results */
- for (j=0; j<(NX+1); j++) {
- for (i=0; i<NY; i++) {
- if (!DBL_REL_EQUAL(data_out[j][i], data_in[j][i], 0.001)) {
- if (!nerrors++) {
- H5_FAILED();
- printf("element [%d][%d] is %g but should have been %g\n",
- j, i, data_out[j][i], data_in[j][i]);
- }
- }
- }
- }
-
- /*
- * Close/release resources.
- */
- if(H5Dclose(dataset) < 0)
- TEST_ERROR
-
- /* Failure */
- if (nerrors) {
- printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
- return 1;
- }
-
- PASSED();
+ nerrors += check_data(DATASETNAME3, file, TRUE);
#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
TESTING("dataset of LE DOUBLE with scale-offset filter");
-
#ifdef H5_HAVE_FILTER_SCALEOFFSET
- /*
- * Open the dataset with scale-offset filter.
- */
- if((dataset = H5Dopen2(file, DATASETNAME4, H5P_DEFAULT)) < 0)
- TEST_ERROR;
-
- /*
- * Data and output buffer initialization.
- */
- for (j = 0; j < NX; j++) {
- for (i = 0; i < NY; i++) {
- data_in[j][i] = ((double)(i + j + 1))/3;
- data_out[j][i] = 0;
- }
- }
- for (i = 0; i < NY; i++) {
- data_in[NX][i] = -2.2;
- data_out[NX][i] = 0;
- }
-
- /*
- * Read data from hyperslab in the file into the hyperslab in
- * memory and display.
- */
- if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- data_out) < 0)
- TEST_ERROR;
-
- /* Check results */
- for (j=0; j<(NX+1); j++) {
- for (i=0; i<NY; i++) {
- if (!DBL_REL_EQUAL(data_out[j][i], data_in[j][i], 0.001)) {
- if (!nerrors++) {
- H5_FAILED();
- printf("element [%d][%d] is %g but should have been %g\n",
- j, i, data_out[j][i], data_in[j][i]);
- }
- }
- }
- }
-
- /*
- * Close/release resources.
- */
- if(H5Dclose(dataset) < 0)
- TEST_ERROR
-
- /* Failure */
- if (nerrors) {
- printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
- return 1;
- }
-
- PASSED();
+ nerrors += check_data(DATASETNAME4, file, TRUE);
#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
-
+
TESTING("dataset of BE DOUBLE with scale-offset filter");
-
#ifdef H5_HAVE_FILTER_SCALEOFFSET
- /*
- * Open the dataset with scale-offset filter.
- */
- if((dataset = H5Dopen2(file, DATASETNAME5, H5P_DEFAULT)) < 0)
- TEST_ERROR;
-
- /*
- * Data and output buffer initialization.
- */
- for (j = 0; j < NX; j++) {
- for (i = 0; i < NY; i++) {
- data_in[j][i] = ((double)(i + j + 1))/3;
- data_out[j][i] = 0;
- }
- }
- for (i = 0; i < NY; i++) {
- data_in[NX][i] = -2.2;
- data_out[NX][i] = 0;
- }
-
- /*
- * Read data from hyperslab in the file into the hyperslab in
- * memory and display.
- */
- if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- data_out) < 0)
- TEST_ERROR;
-
- /* Check results */
- for (j=0; j<(NX+1); j++) {
- for (i=0; i<NY; i++) {
- if (!DBL_REL_EQUAL(data_out[j][i], data_in[j][i], 0.001)) {
- if (!nerrors++) {
- H5_FAILED();
- printf("element [%d][%d] is %g but should have been %g\n",
- j, i, data_out[j][i], data_in[j][i]);
- }
- }
- }
- }
-
- /*
- * Close/release resources.
- */
- if(H5Dclose(dataset) < 0)
- TEST_ERROR
-
- /* Failure */
- if (nerrors) {
- printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
- return 1;
- }
-
- PASSED();
+ nerrors += check_data(DATASETNAME5, file, TRUE);
#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
-
+
TESTING("dataset of LE CHAR with scale-offset filter");
-
#ifdef H5_HAVE_FILTER_SCALEOFFSET
- /*
- * Open the dataset with scale-offset filter.
- */
- if((dataset = H5Dopen2(file, DATASETNAME6, H5P_DEFAULT)) < 0)
- TEST_ERROR;
-
- /*
- * Data and output buffer initialization.
- */
- for (j = 0; j < NX; j++) {
- for (i = 0; i < NY; i++) {
- int_data_in[j][i] = i + j;
- int_data_out[j][i] = 0;
- }
- }
- for (i = 0; i < NY; i++) {
- int_data_in[NX][i] = -2;
- int_data_out[NX][i] = 0;
- }
-
- /*
- * Read data from hyperslab in the file into the hyperslab in
- * memory and display.
- */
- if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- int_data_out) < 0)
- TEST_ERROR;
-
- /* Check results */
- for (j=0; j<(NX+1); j++) {
- for (i=0; i<NY; i++) {
- if (int_data_out[j][i] != int_data_in[j][i]) {
- if (!nerrors++) {
- H5_FAILED();
- printf("element [%d][%d] is %d but should have been %d\n",
- j, i, (int)int_data_out[j][i],
- (int)int_data_in[j][i]);
- }
- }
- }
- }
-
- /*
- * Close/release resources.
- */
- if(H5Dclose(dataset) < 0)
- TEST_ERROR
-
- /* Failure */
- if (nerrors) {
- printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
- return 1;
- }
-
- PASSED();
-
+ nerrors += check_data(DATASETNAME6, file, FALSE);
#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
-
+
TESTING("dataset of BE CHAR with scale-offset filter");
-
#ifdef H5_HAVE_FILTER_SCALEOFFSET
- /*
- * Open the dataset with scale-offset filter.
- */
- if((dataset = H5Dopen2(file, DATASETNAME7, H5P_DEFAULT)) < 0)
- TEST_ERROR;
-
- /*
- * Data and output buffer initialization.
- */
- for (j = 0; j < NX; j++) {
- for (i = 0; i < NY; i++) {
- int_data_in[j][i] = i + j;
- int_data_out[j][i] = 0;
- }
- }
- for (i = 0; i < NY; i++) {
- int_data_in[NX][i] = -2;
- int_data_out[NX][i] = 0;
- }
-
- /*
- * Read data from hyperslab in the file into the hyperslab in
- * memory and display.
- */
- if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- int_data_out) < 0)
- TEST_ERROR;
-
- /* Check results */
- for (j=0; j<(NX+1); j++) {
- for (i=0; i<NY; i++) {
- if (int_data_out[j][i] != int_data_in[j][i]) {
- if (!nerrors++) {
- H5_FAILED();
- printf("element [%d][%d] is %d but should have been %d\n",
- j, i, (int)int_data_out[j][i],
- (int)int_data_in[j][i]);
- }
- }
- }
- }
-
- /*
- * Close/release resources.
- */
- if(H5Dclose(dataset) < 0)
- TEST_ERROR
-
- /* Failure */
- if (nerrors) {
- printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
- return 1;
- }
-
- PASSED();
-
+ nerrors += check_data(DATASETNAME7, file, FALSE);
#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
-
+
TESTING("dataset of LE SHORT with scale-offset filter");
-
#ifdef H5_HAVE_FILTER_SCALEOFFSET
- /*
- * Open the dataset with scale-offset filter.
- */
- if((dataset = H5Dopen2(file, DATASETNAME8, H5P_DEFAULT)) < 0)
- TEST_ERROR;
-
- /*
- * Data and output buffer initialization.
- */
- for (j = 0; j < NX; j++) {
- for (i = 0; i < NY; i++) {
- int_data_in[j][i] = i + j;
- int_data_out[j][i] = 0;
- }
- }
- for (i = 0; i < NY; i++) {
- int_data_in[NX][i] = -2;
- int_data_out[NX][i] = 0;
- }
-
- /*
- * Read data from hyperslab in the file into the hyperslab in
- * memory and display.
- */
- if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- int_data_out) < 0)
- TEST_ERROR;
-
- /* Check results */
- for (j=0; j<(NX+1); j++) {
- for (i=0; i<NY; i++) {
- if (int_data_out[j][i] != int_data_in[j][i]) {
- if (!nerrors++) {
- H5_FAILED();
- printf("element [%d][%d] is %d but should have been %d\n",
- j, i, (int)int_data_out[j][i],
- (int)int_data_in[j][i]);
- }
- }
- }
- }
-
- /*
- * Close/release resources.
- */
- if(H5Dclose(dataset) < 0)
- TEST_ERROR
-
- /* Failure */
- if (nerrors) {
- printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
- return 1;
- }
-
- PASSED();
-
+ nerrors += check_data(DATASETNAME8, file, FALSE);
#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
-
+
TESTING("dataset of BE SHORT with scale-offset filter");
-
#ifdef H5_HAVE_FILTER_SCALEOFFSET
- /*
- * Open the dataset with scale-offset filter.
- */
- if((dataset = H5Dopen2(file, DATASETNAME9, H5P_DEFAULT)) < 0)
- TEST_ERROR;
-
- /*
- * Data and output buffer initialization.
- */
- for (j = 0; j < NX; j++) {
- for (i = 0; i < NY; i++) {
- int_data_in[j][i] = i + j;
- int_data_out[j][i] = 0;
- }
- }
- for (i = 0; i < NY; i++) {
- int_data_in[NX][i] = -2;
- int_data_out[NX][i] = 0;
- }
-
- /*
- * Read data from hyperslab in the file into the hyperslab in
- * memory and display.
- */
- if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- int_data_out) < 0)
- TEST_ERROR;
-
- /* Check results */
- for (j=0; j<(NX+1); j++) {
- for (i=0; i<NY; i++) {
- if (int_data_out[j][i] != int_data_in[j][i]) {
- if (!nerrors++) {
- H5_FAILED();
- printf("element [%d][%d] is %d but should have been %d\n",
- j, i, (int)int_data_out[j][i],
- (int)int_data_in[j][i]);
- }
- }
- }
- }
-
- /*
- * Close/release resources.
- */
- if(H5Dclose(dataset) < 0)
- TEST_ERROR
-
- /* Failure */
- if (nerrors) {
- printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
- return 1;
- }
-
- PASSED();
-
+ nerrors += check_data(DATASETNAME9, file, FALSE);
#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
TESTING("dataset of LE INT with scale-offset filter");
-
#ifdef H5_HAVE_FILTER_SCALEOFFSET
- /*
- * Open the dataset with scale-offset filter.
- */
- if((dataset = H5Dopen2(file, DATASETNAME10, H5P_DEFAULT)) < 0)
- TEST_ERROR;
-
- /*
- * Data and output buffer initialization.
- */
- for (j = 0; j < NX; j++) {
- for (i = 0; i < NY; i++) {
- int_data_in[j][i] = i + j;
- int_data_out[j][i] = 0;
- }
- }
- for (i = 0; i < NY; i++) {
- int_data_in[NX][i] = -2;
- int_data_out[NX][i] = 0;
- }
-
- /*
- * Read data from hyperslab in the file into the hyperslab in
- * memory and display.
- */
- if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- int_data_out) < 0)
- TEST_ERROR;
-
- /* Check results */
- for (j=0; j<(NX+1); j++) {
- for (i=0; i<NY; i++) {
- if (int_data_out[j][i] != int_data_in[j][i]) {
- if (!nerrors++) {
- H5_FAILED();
- printf("element [%d][%d] is %d but should have been %d\n",
- j, i, (int)int_data_out[j][i],
- (int)int_data_in[j][i]);
- }
- }
- }
- }
-
- /*
- * Close/release resources.
- */
- if(H5Dclose(dataset) < 0)
- TEST_ERROR
-
- /* Failure */
- if (nerrors) {
- printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
- return 1;
- }
-
- PASSED();
-
+ nerrors += check_data(DATASETNAME10, file, FALSE);
#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
-
+
TESTING("dataset of BE INT with scale-offset filter");
-
#ifdef H5_HAVE_FILTER_SCALEOFFSET
- /*
- * Open the dataset with scale-offset filter.
- */
- if((dataset = H5Dopen2(file, DATASETNAME11, H5P_DEFAULT)) < 0)
- TEST_ERROR;
-
- /*
- * Data and output buffer initialization.
- */
- for (j = 0; j < NX; j++) {
- for (i = 0; i < NY; i++) {
- int_data_in[j][i] = i + j;
- int_data_out[j][i] = 0;
- }
- }
- for (i = 0; i < NY; i++) {
- int_data_in[NX][i] = -2;
- int_data_out[NX][i] = 0;
- }
-
- /*
- * Read data from hyperslab in the file into the hyperslab in
- * memory and display.
- */
- if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- int_data_out) < 0)
- TEST_ERROR;
-
- /* Check results */
- for (j=0; j<(NX+1); j++) {
- for (i=0; i<NY; i++) {
- if (int_data_out[j][i] != int_data_in[j][i]) {
- if (!nerrors++) {
- H5_FAILED();
- printf("element [%d][%d] is %d but should have been %d\n",
- j, i, (int)int_data_out[j][i],
- (int)int_data_in[j][i]);
- }
- }
- }
- }
-
- /*
- * Close/release resources.
- */
- if(H5Dclose(dataset) < 0)
- TEST_ERROR
-
- /* Failure */
- if (nerrors) {
- printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
- return 1;
- }
-
- PASSED();
-
+ nerrors += check_data(DATASETNAME11, file, FALSE);
#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
TESTING("dataset of LE LONG LONG with scale-offset filter");
-
#ifdef H5_HAVE_FILTER_SCALEOFFSET
- /*
- * Open the dataset with scale-offset filter.
- */
- if((dataset = H5Dopen2(file, DATASETNAME12, H5P_DEFAULT)) < 0)
- TEST_ERROR;
-
- /*
- * Data and output buffer initialization.
- */
- for (j = 0; j < NX; j++) {
- for (i = 0; i < NY; i++) {
- int_data_in[j][i] = i + j;
- int_data_out[j][i] = 0;
- }
- }
- for (i = 0; i < NY; i++) {
- int_data_in[NX][i] = -2;
- int_data_out[NX][i] = 0;
- }
-
- /*
- * Read data from hyperslab in the file into the hyperslab in
- * memory and display.
- */
- if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- int_data_out) < 0)
- TEST_ERROR;
-
- /* Check results */
- for (j=0; j<(NX+1); j++) {
- for (i=0; i<NY; i++) {
- if (int_data_out[j][i] != int_data_in[j][i]) {
- if (!nerrors++) {
- H5_FAILED();
- printf("element [%d][%d] is %d but should have been %d\n",
- j, i, (int)int_data_out[j][i],
- (int)int_data_in[j][i]);
- }
- }
- }
- }
-
- /*
- * Close/release resources.
- */
- if(H5Dclose(dataset) < 0)
- TEST_ERROR
-
- /* Failure */
- if (nerrors) {
- printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
- return 1;
- }
-
- PASSED();
-
+ nerrors += check_data(DATASETNAME12, file, FALSE);
#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
-
+
TESTING("dataset of BE LONG LONG with scale-offset filter");
-
#ifdef H5_HAVE_FILTER_SCALEOFFSET
- /*
- * Open the dataset with scale-offset filter.
- */
- if((dataset = H5Dopen2(file, DATASETNAME13, H5P_DEFAULT)) < 0)
- TEST_ERROR;
+ nerrors += check_data(DATASETNAME13, file, FALSE);
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
- /*
- * Data and output buffer initialization.
- */
- for (j = 0; j < NX; j++) {
- for (i = 0; i < NY; i++) {
- int_data_in[j][i] = i + j;
- int_data_out[j][i] = 0;
- }
- }
- for (i = 0; i < NY; i++) {
- int_data_in[NX][i] = -2;
- int_data_out[NX][i] = 0;
- }
+ TESTING("dataset of LE FLOAT with Fletcher32 filter");
+#ifdef H5_HAVE_FILTER_FLETCHER32
+ nerrors += check_data(DATASETNAME14, file, TRUE);
+#else /*H5_HAVE_FILTER_FLETCHER32*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_FLETCHER32*/
+
+ TESTING("dataset of BE FLOAT with Fletcher32 filter");
+#ifdef H5_HAVE_FILTER_FLETCHER32
+ nerrors += check_data(DATASETNAME15, file, TRUE);
+#else /*H5_HAVE_FILTER_FLETCHER32*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_FLETCHER32*/
+
+ TESTING("dataset of LE FLOAT with Deflate filter");
+#ifdef H5_HAVE_FILTER_DEFLATE
+ nerrors += check_data(DATASETNAME16, file, TRUE);
+#else /*H5_HAVE_FILTER_DEFLATE*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_DEFLATE*/
- /*
- * Read data from hyperslab in the file into the hyperslab in
- * memory and display.
- */
- if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- int_data_out) < 0)
- TEST_ERROR;
+ TESTING("dataset of BE FLOAT with Deflate filter");
+#ifdef H5_HAVE_FILTER_DEFLATE
+ nerrors += check_data(DATASETNAME17, file, TRUE);
+#else /*H5_HAVE_FILTER_DEFLATE*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_DEFLATE*/
- /* Check results */
- for (j=0; j<(NX+1); j++) {
- for (i=0; i<NY; i++) {
- if (int_data_out[j][i] != int_data_in[j][i]) {
- if (!nerrors++) {
- H5_FAILED();
- printf("element [%d][%d] is %d but should have been %d\n",
- j, i, (int)int_data_out[j][i],
- (int)int_data_in[j][i]);
- }
- }
- }
- }
+ TESTING("dataset of LE FLOAT with Szip filter");
+#ifdef H5_HAVE_FILTER_SZIP
+ nerrors += check_data(DATASETNAME18, file, TRUE);
+#else /*H5_HAVE_FILTER_SZIP*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SZIP*/
- /*
- * Close/release resources.
- */
- if(H5Dclose(dataset) < 0)
- TEST_ERROR
+ TESTING("dataset of BE FLOAT with Szip filter");
+#ifdef H5_HAVE_FILTER_SZIP
+ nerrors += check_data(DATASETNAME19, file, TRUE);
+#else /*H5_HAVE_FILTER_SZIP*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SZIP*/
- /* Failure */
- if (nerrors) {
- printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
- return 1;
- }
+ TESTING("dataset of LE FLOAT with Shuffle filter");
+#ifdef H5_HAVE_FILTER_SHUFFLE
+ nerrors += check_data(DATASETNAME20, file, TRUE);
+#else /*H5_HAVE_FILTER_SHUFFLE*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SHUFFLE*/
- PASSED();
+ TESTING("dataset of BE FLOAT with Shuffle filter");
+#ifdef H5_HAVE_FILTER_SHUFFLE
+ nerrors += check_data(DATASETNAME21, file, TRUE);
+#else /*H5_HAVE_FILTER_SHUFFLE*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SHUFFLE*/
-#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ TESTING("dataset of LE FLOAT with Nbit filter");
+#ifdef H5_HAVE_FILTER_NBIT
+ nerrors += check_data(DATASETNAME22, file, TRUE);
+#else /*H5_HAVE_FILTER_NBIT*/
SKIPPED();
puts(not_supported);
-#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+#endif /*H5_HAVE_FILTER_NBIT*/
+
+ TESTING("dataset of BE FLOAT with Nbit filter");
+#ifdef H5_HAVE_FILTER_NBIT
+ nerrors += check_data(DATASETNAME23, file, TRUE);
+#else /*H5_HAVE_FILTER_NBIT*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_NBIT*/
if(H5Fclose(file))
TEST_ERROR
@@ -916,20 +394,20 @@ error:
H5E_BEGIN_TRY {
H5Fclose(file);
} H5E_END_TRY;
- return 1;
+ return nerrors;
}
/*-------------------------------------------------------------------------
* Function: main
*
- * Purpose: Tests the basic features of Virtual File Drivers
+ * Purpose: Tests reading files created on LE and BE systems.
*
* Return: Success: exit(0)
* Failure: exit(1)
*
* Programmer: Raymond Lu
- * Tuesday, Sept 24, 2002
+ * Thursday, March 23, 2006
*
*-------------------------------------------------------------------------
*/
@@ -940,17 +418,13 @@ int main(void)
h5_reset();
- puts("Testing reading data created on OpenVMS");
- h5_fixname(FILENAME[0], H5P_DEFAULT, filename, sizeof filename);
- nerrors += read_data(filename);
-
puts("Testing reading data created on Linux");
h5_fixname(FILENAME[1], H5P_DEFAULT, filename, sizeof filename);
- nerrors += read_data(filename);
+ nerrors += open_dataset(filename);
puts("Testing reading data created on Solaris");
h5_fixname(FILENAME[2], H5P_DEFAULT, filename, sizeof filename);
- nerrors += read_data(filename);
+ nerrors += open_dataset(filename);
if (nerrors) {
printf("***** %u FAILURE%s! *****\n",
diff --git a/test/gen_cross.c b/test/gen_cross.c
index 32be867..d960499 100755
--- a/test/gen_cross.c
+++ b/test/gen_cross.c
@@ -27,7 +27,8 @@
#include "h5test.h"
#define H5FILE_NAME "data.h5"
-#define DATASETNAME "Array"
+#define DATASETNAME "Array_le"
+#define DATASETNAME1 "Array_be"
#define DATASETNAME2 "Scale_offset_float_data_le"
#define DATASETNAME3 "Scale_offset_float_data_be"
#define DATASETNAME4 "Scale_offset_double_data_le"
@@ -40,6 +41,18 @@
#define DATASETNAME11 "Scale_offset_int_data_be"
#define DATASETNAME12 "Scale_offset_long_long_data_le"
#define DATASETNAME13 "Scale_offset_long_long_data_be"
+
+#define DATASETNAME14 "Fletcher_float_data_le"
+#define DATASETNAME15 "Fletcher_float_data_be"
+#define DATASETNAME16 "Deflate_float_data_le"
+#define DATASETNAME17 "Deflate_float_data_be"
+#define DATASETNAME18 "Szip_float_data_le"
+#define DATASETNAME19 "Szip_float_data_be"
+#define DATASETNAME20 "Shuffle_float_data_le"
+#define DATASETNAME21 "Shuffle_float_data_be"
+#define DATASETNAME22 "Nbit_float_data_le"
+#define DATASETNAME23 "Nbit_float_data_be"
+
#define NX 6
#define NY 6
#define RANK 2
@@ -53,6 +66,11 @@ int create_scale_offset_dsets_char(hid_t fid, hid_t fsid, hid_t msid);
int create_scale_offset_dsets_short(hid_t fid, hid_t fsid, hid_t msid);
int create_scale_offset_dsets_int(hid_t fid, hid_t fsid, hid_t msid);
int create_scale_offset_dsets_long_long(hid_t fid, hid_t fsid, hid_t msid);
+int create_fletcher_dsets_float(hid_t fid, hid_t fsid, hid_t msid);
+int create_deflate_dsets_float(hid_t fid, hid_t fsid, hid_t msid);
+int create_szip_dsets_float(hid_t fid, hid_t fsid, hid_t msid);
+int create_shuffle_dsets_float(hid_t fid, hid_t fsid, hid_t msid);
+int create_nbit_dsets_float(hid_t fid, hid_t fsid, hid_t msid);
/*-------------------------------------------------------------------------
@@ -84,15 +102,15 @@ create_normal_dset(hid_t fid, hid_t fsid, hid_t msid)
*/
for (j = 0; j < NX; j++) {
for (i = 0; i < NY; i++)
- data[j][i] = i + j;
+ data[j][i] = ((float)(i + j + 1)) / 3;
}
/*
- * 0 1 2 3 4 5
- * 1 2 3 4 5 6
- * 2 3 4 5 6 7
- * 3 4 5 6 7 8
- * 4 5 6 7 8 9
- * 5 6 7 8 9 10
+ * 1/3 2/3 3/3 4/3 5/3 6/3
+ * 2/3 3/3 4/3 5/3 6/3 7/3
+ * 3/3 4/3 5/3 6/3 7/3 8/3
+ * 4/3 5/3 6/3 7/3 8/3 9/3
+ * 5/3 6/3 7/3 8/3 9/3 10/3
+ * 6/3 7/3 8/3 9/3 10/3 11/3
* -2.2 -2.2 -2.2 -2.2 -2.2 -2.2
*/
@@ -106,9 +124,9 @@ create_normal_dset(hid_t fid, hid_t fsid, hid_t msid)
/*
* Create a new dataset within the file using defined dataspace and
- * datatype and default dataset creation properties.
+ * little-endian datatype and default dataset creation properties.
*/
- if((dataset = H5Dcreate2(fid, DATASETNAME, H5T_NATIVE_DOUBLE, fsid,
+ if((dataset = H5Dcreate2(fid, DATASETNAME, H5T_IEEE_F64LE, fsid,
H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
TEST_ERROR
@@ -118,14 +136,38 @@ create_normal_dset(hid_t fid, hid_t fsid, hid_t msid)
if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
TEST_ERROR
+ /*
+ * Close dataset
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
/*
- * Close/release resources.
+ * Create a new dataset within the file using defined dataspace and
+ * big-endian datatype and default dataset creation properties.
*/
- if(H5Pclose(dcpl) < 0)
+ if((dataset = H5Dcreate2(fid, DATASETNAME1, H5T_IEEE_F64BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
TEST_ERROR
+
+ /*
+ * Close dataset
+ */
if(H5Dclose(dataset) < 0)
TEST_ERROR
+ /*
+ * Close/release resources.
+ */
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+
return 0;
#ifdef H5_HAVE_FILTER_SCALEOFFSET
@@ -774,6 +816,527 @@ error:
/*-------------------------------------------------------------------------
+ * Function: create_fletcher_dsets_float
+ *
+ * Purpose: Create a dataset of FLOAT datatype with fletcher filter
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Raymond Lu
+ * 29 March 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+create_fletcher_dsets_float(hid_t fid, hid_t fsid, hid_t msid)
+{
+#ifdef H5_HAVE_FILTER_FLETCHER32
+ hid_t dataset; /* dataset handles */
+ hid_t dcpl;
+ float data[NX][NY]; /* data to write */
+ float fillvalue = -2.2;
+ hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
+ int i, j;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++)
+ data[j][i] = ((float)(i + j + 1))/3;
+ }
+
+ /*
+     * Create the dataset creation property list, add the Fletcher32
+     * filter, set the chunk size, and set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_filter(dcpl, H5Z_FILTER_FLETCHER32, 0, (size_t)0, NULL) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_FLOAT, &fillvalue) < 0)
+ TEST_ERROR
+
+ /*
+ * Create a new dataset within the file using defined dataspace, little
+ * endian datatype and default dataset creation properties.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME14, H5T_IEEE_F32LE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME15, H5T_IEEE_F32BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+
+#else /* H5_HAVE_FILTER_FLETCHER32 */
+ const char *not_supported= "Fletcher filter is not enabled. Can't create the dataset.";
+
+ puts(not_supported);
+#endif /* H5_HAVE_FILTER_FLETCHER32 */
+
+ return 0;
+
+#ifdef H5_HAVE_FILTER_FLETCHER32
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_FLETCHER32 */
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: create_deflate_dsets_float
+ *
+ * Purpose: Create a dataset of FLOAT datatype with deflate filter
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Raymond Lu
+ * 29 March 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+create_deflate_dsets_float(hid_t fid, hid_t fsid, hid_t msid)
+{
+#ifdef H5_HAVE_FILTER_DEFLATE
+ hid_t dataset; /* dataset handles */
+ hid_t dcpl;
+ float data[NX][NY]; /* data to write */
+ float fillvalue = -2.2;
+ hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
+ int i, j;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++)
+ data[j][i] = ((float)(i + j + 1))/3;
+ }
+
+ /*
+     * Create the dataset creation property list, add the Deflate
+     * filter, set the chunk size, and set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
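+    /* zlib (gzip) compression at level 6 */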
+ if(H5Pset_deflate (dcpl, 6) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_FLOAT, &fillvalue) < 0)
+ TEST_ERROR
+
+ /*
+ * Create a new dataset within the file using defined dataspace, little
+ * endian datatype and default dataset creation properties.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME16, H5T_IEEE_F32LE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME17, H5T_IEEE_F32BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+
+#else /* H5_HAVE_FILTER_DEFLATE */
+ const char *not_supported= "Deflate filter is not enabled. Can't create the dataset.";
+
+ puts(not_supported);
+#endif /* H5_HAVE_FILTER_DEFLATE */
+
+ return 0;
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_DEFLATE */
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: create_szip_dsets_float
+ *
+ * Purpose: Create a dataset of FLOAT datatype with szip filter
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Raymond Lu
+ * 29 March 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+create_szip_dsets_float(hid_t fid, hid_t fsid, hid_t msid)
+{
+#ifdef H5_HAVE_FILTER_SZIP
+ hid_t dataset; /* dataset handles */
+ hid_t dcpl;
+ float data[NX][NY]; /* data to write */
+ float fillvalue = -2.2;
+ hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
+ int i, j;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++)
+ data[j][i] = ((float)(i + j + 1))/3;
+ }
+
+ /*
+     * Create the dataset creation property list, add the Szip
+     * filter, set the chunk size, and set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
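+    /* Szip with the nearest-neighbor option and 4 pixels per block */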
+ if(H5Pset_szip(dcpl, H5_SZIP_NN_OPTION_MASK, 4) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_FLOAT, &fillvalue) < 0)
+ TEST_ERROR
+
+ /*
+ * Create a new dataset within the file using defined dataspace, little
+ * endian datatype and default dataset creation properties.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME18, H5T_IEEE_F32LE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME19, H5T_IEEE_F32BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+
+#else /* H5_HAVE_FILTER_SZIP */
+ const char *not_supported= "Szip filter is not enabled. Can't create the dataset.";
+
+ puts(not_supported);
+#endif /* H5_HAVE_FILTER_SZIP */
+
+ return 0;
+
+#ifdef H5_HAVE_FILTER_SZIP
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SZIP */
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: create_shuffle_dsets_float
+ *
+ * Purpose: Create a dataset of FLOAT datatype with shuffle filter
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Raymond Lu
+ * 29 March 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+create_shuffle_dsets_float(hid_t fid, hid_t fsid, hid_t msid)
+{
+#ifdef H5_HAVE_FILTER_SHUFFLE
+ hid_t dataset; /* dataset handles */
+ hid_t dcpl;
+ float data[NX][NY]; /* data to write */
+ float fillvalue = -2.2;
+ hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
+ int i, j;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++)
+ data[j][i] = ((float)(i + j + 1))/3;
+ }
+
+ /*
+     * Create the dataset creation property list, add the Shuffle
+     * filter, set the chunk size, and set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_shuffle (dcpl) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_FLOAT, &fillvalue) < 0)
+ TEST_ERROR
+
+ /*
+ * Create a new dataset within the file using defined dataspace, little
+ * endian datatype and default dataset creation properties.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME20, H5T_IEEE_F32LE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME21, H5T_IEEE_F32BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+
+#else /* H5_HAVE_FILTER_SHUFFLE */
+ const char *not_supported= "Shuffle filter is not enabled. Can't create the dataset.";
+
+ puts(not_supported);
+#endif /* H5_HAVE_FILTER_SHUFFLE */
+
+ return 0;
+
+#ifdef H5_HAVE_FILTER_SHUFFLE
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SHUFFLE */
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: create_nbit_dsets_float
+ *
+ * Purpose: Create a dataset of FLOAT datatype with nbit filter
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Raymond Lu
+ * 29 March 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+create_nbit_dsets_float(hid_t fid, hid_t fsid, hid_t msid)
+{
+#ifdef H5_HAVE_FILTER_NBIT
+ hid_t dataset; /* dataset handles */
+ hid_t datatype;
+ hid_t dcpl;
+ size_t precision, offset;
+ float data[NX][NY]; /* data to write */
+ float fillvalue = -2.2;
+ hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
+ int i, j;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++)
+ data[j][i] = ((float)(i + j + 1))/3;
+ }
+
+ /*
+     * Create the dataset creation property list, add the Nbit
+     * filter, set the chunk size, and set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_nbit(dcpl) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_FLOAT, &fillvalue) < 0)
+ TEST_ERROR
+
+ /* Define user-defined single-precision floating-point type for dataset.
+ * A 20-bit little-endian data type. */
+ if((datatype = H5Tcopy(H5T_IEEE_F32LE)) < 0)
+ TEST_ERROR
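+    /* Field layout: sign bit at position 26, 6-bit exponent starting at
+     * bit 20, 13-bit mantissa starting at bit 7; the offset (7) and
+     * precision (20) set below match this layout. */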
+ if(H5Tset_fields(datatype, (size_t)26, (size_t)20, (size_t)6, (size_t)7, (size_t)13) < 0)
+ TEST_ERROR
+ offset = 7;
+ if(H5Tset_offset(datatype,offset) < 0)
+ TEST_ERROR
+ precision = 20;
+ if(H5Tset_precision(datatype,precision) < 0)
+ TEST_ERROR
+ if(H5Tset_size(datatype, (size_t)4) < 0)
+ TEST_ERROR
+ if(H5Tset_ebias(datatype, (size_t)31) < 0)
+ TEST_ERROR
+
+ /*
+ * Create a new dataset within the file using defined dataspace,
+ * user-defined datatype, and default dataset creation properties.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME22, datatype, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if(H5Tset_order(datatype, H5T_ORDER_BE) < 0)
+ TEST_ERROR
+ if((dataset = H5Dcreate2(fid, DATASETNAME23, datatype, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+
+#else /* H5_HAVE_FILTER_NBIT */
+ const char *not_supported= "Nbit filter is not enabled. Can't create the dataset.";
+
+ puts(not_supported);
+#endif /* H5_HAVE_FILTER_NBIT */
+
+ return 0;
+
+#ifdef H5_HAVE_FILTER_NBIT
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_NBIT */
+}
+
+
+/*-------------------------------------------------------------------------
* Function: main
*
* Purpose: Create a file for cross_read.c test.
@@ -853,6 +1416,27 @@ main (void)
if(create_scale_offset_dsets_long_long(file, filespace, memspace) < 0)
{H5_FAILED(); AT(); return 1;}
+ /* Create a dataset of FLOAT with fletcher filter */
+ if(create_fletcher_dsets_float(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+ /* Create a dataset of FLOAT with deflate filter */
+ if(create_deflate_dsets_float(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+ /* Create a dataset of FLOAT with szip filter */
+ if(create_szip_dsets_float(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+ /* Create a dataset of FLOAT with shuffle filter */
+ if(create_shuffle_dsets_float(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+ /* Create a dataset of FLOAT with nbit filter */
+ if(create_nbit_dsets_float(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+
/*
* Close/release resources.
*/
diff --git a/test/le_data.h5 b/test/le_data.h5
index 1225d21..e79ab47 100644
--- a/test/le_data.h5
+++ b/test/le_data.h5
Binary files differ
diff --git a/test/vms_data.h5 b/test/vms_data.h5
deleted file mode 100644
index 14aeef2..0000000
--- a/test/vms_data.h5
+++ /dev/null
Binary files differ