summaryrefslogtreecommitdiffstats
path: root/test
diff options
context:
space:
mode:
authorNeil Fortner <nfortne2@hdfgroup.org>2011-02-02 14:41:49 (GMT)
committerNeil Fortner <nfortne2@hdfgroup.org>2011-02-02 14:41:49 (GMT)
commit73d58f8b3c8cfba33875fdf8742ee2c921b1c066 (patch)
treeb845384f461709e1ec34e4abf75d4cf85bd3a06c /test
parente2b089b21cdac9687a08b2c5ef377f6b66d2c71e (diff)
downloadhdf5-73d58f8b3c8cfba33875fdf8742ee2c921b1c066.zip
hdf5-73d58f8b3c8cfba33875fdf8742ee2c921b1c066.tar.gz
hdf5-73d58f8b3c8cfba33875fdf8742ee2c921b1c066.tar.bz2
[svn-r20029] Purpose: Fix bug 2131
Description: When using the scale-offset filter with floating point data or fill values, big endian machines would save some metadata in the wrong byte order. This caused such datasets to yield incorrect data when read on little endian machines. Fixed the scale-offset filter to always save this metadata in the right byte order (i.e. little endian). Tested: jam, amani, heiwa (h5committest); fedora, linew
Diffstat (limited to 'test')
-rw-r--r--test/be_data.h5bin9424 -> 40320 bytes
-rwxr-xr-xtest/cross_read.c737
-rwxr-xr-xtest/gen_cross.c671
-rw-r--r--test/le_data.h5bin9424 -> 40320 bytes
-rw-r--r--test/vms_data.h5bin9424 -> 40320 bytes
5 files changed, 1296 insertions, 112 deletions
diff --git a/test/be_data.h5 b/test/be_data.h5
index 7fc9ef7..0feefa3 100644
--- a/test/be_data.h5
+++ b/test/be_data.h5
Binary files differ
diff --git a/test/cross_read.c b/test/cross_read.c
index 279d102..6588031 100755
--- a/test/cross_read.c
+++ b/test/cross_read.c
@@ -32,9 +32,19 @@ const char *FILENAME[] = {
};
#define DATASETNAME "Array"
-#define DATASETNAME2 "Scale_offset_double_data"
-#define DATASETNAME3 "Scale_offset_int_data"
-#define NX 6
+#define DATASETNAME2 "Scale_offset_float_data_le"
+#define DATASETNAME3 "Scale_offset_float_data_be"
+#define DATASETNAME4 "Scale_offset_double_data_le"
+#define DATASETNAME5 "Scale_offset_double_data_be"
+#define DATASETNAME6 "Scale_offset_char_data_le"
+#define DATASETNAME7 "Scale_offset_char_data_be"
+#define DATASETNAME8 "Scale_offset_short_data_le"
+#define DATASETNAME9 "Scale_offset_short_data_be"
+#define DATASETNAME10 "Scale_offset_int_data_le"
+#define DATASETNAME11 "Scale_offset_int_data_be"
+#define DATASETNAME12 "Scale_offset_long_long_data_le"
+#define DATASETNAME13 "Scale_offset_long_long_data_be"
+#define NX 6
#define NY 6
@@ -57,18 +67,13 @@ static int read_data(char *fname)
{
const char *pathname = H5_get_srcdir_filename(fname); /* Corrected test file name */
hid_t file, dataset; /* handles */
- hid_t datatype;
- hid_t dt;
- float data_in[NX][NY]; /* input buffer */
- float data_out[NX][NY]; /* output buffer */
- double double_data_in[NX][NY]; /* input buffer */
- double double_data_out[NX][NY]; /* output buffer */
- int int_data_in[NX][NY]; /* input buffer */
- int int_data_out[NX][NY]; /* output buffer */
+ double data_in[NX+1][NY]; /* input buffer */
+ double data_out[NX+1][NY]; /* output buffer */
+ long long int_data_in[NX+1][NY]; /* input buffer */
+ long long int_data_out[NX+1][NY]; /* output buffer */
int i, j;
unsigned nerrors = 0;
const char *not_supported= " Scaleoffset filter is not enabled.";
- const char *not_fixed= " Scaleoffset filter bug (2131) is not fixed yet.";
/*
* Open the file.
@@ -76,8 +81,8 @@ static int read_data(char *fname)
if((file = H5Fopen(pathname, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
TEST_ERROR;
- TESTING(" regular dataset");
-
+ TESTING("regular dataset");
+
/*
* Open the regular dataset.
*/
@@ -93,6 +98,10 @@ static int read_data(char *fname)
data_out[j][i] = 0;
}
}
+ for (i = 0; i < NY; i++) {
+ data_in[NX][i] = -2.2;
+ data_out[NX][i] = 0;
+ }
/*
* 0 1 2 3 4 5
* 1 2 3 4 5 6
@@ -100,29 +109,80 @@ static int read_data(char *fname)
* 3 4 5 6 7 8
* 4 5 6 7 8 9
* 5 6 7 8 9 10
+ * -2.2 -2.2 -2.2 -2.2 -2.2 -2.2
*/
/*
- * Get datatype and dataspace handles and then query
- * dataset class, order, size, rank and dimensions.
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
*/
- if((dt = H5Dget_type(dataset)) < 0) /* datatype handle */
+ if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data_out) < 0)
TEST_ERROR;
- if((datatype = H5Tget_native_type(dt, H5T_DIR_DEFAULT)) < 0)
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ /* if (data_out[j][i] != data_in[j][i]) { */
+ if (!FLT_ABS_EQUAL(data_out[j][i], data_in[j][i])) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %g but should have been %g\n",
+ j, i, data_out[j][i], data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+
+ TESTING("dataset of LE FLOAT with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME2, H5P_DEFAULT)) < 0)
TEST_ERROR;
/*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ data_in[j][i] = ((double)(i + j + 1))/3;
+ data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ data_in[NX][i] = -2.2;
+ data_out[NX][i] = 0;
+ }
+
+ /*
* Read data from hyperslab in the file into the hyperslab in
* memory and display.
*/
- if(H5Dread(dataset, datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, data_out) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data_out) < 0)
TEST_ERROR;
/* Check results */
- for (j=0; j<NX; j++) {
+ for (j=0; j<(NX+1); j++) {
for (i=0; i<NY; i++) {
- /* if (data_out[j][i] != data_in[j][i]) { */
- if (!DBL_ABS_EQUAL(data_out[j][i], data_in[j][i])) {
+ if (!DBL_REL_EQUAL(data_out[j][i], data_in[j][i], 0.001)) {
if (!nerrors++) {
H5_FAILED();
printf("element [%d][%d] is %g but should have been %g\n",
@@ -135,9 +195,8 @@ static int read_data(char *fname)
/*
* Close/release resources.
*/
- H5Tclose(dt);
- H5Tclose(datatype);
- H5Dclose(dataset);
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
/* Failure */
if (nerrors) {
@@ -146,14 +205,80 @@ static int read_data(char *fname)
}
PASSED();
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of BE FLOAT with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME3, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ data_in[j][i] = ((double)(i + j + 1))/3;
+ data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ data_in[NX][i] = -2.2;
+ data_out[NX][i] = 0;
+ }
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (!DBL_REL_EQUAL(data_out[j][i], data_in[j][i], 0.001)) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %g but should have been %g\n",
+ j, i, data_out[j][i], data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of LE DOUBLE with scale-offset filter");
- TESTING(" dataset of DOUBLE with scale-offset filter");
-#ifdef TMP
#ifdef H5_HAVE_FILTER_SCALEOFFSET
/*
* Open the dataset with scale-offset filter.
*/
- if((dataset = H5Dopen2(file, DATASETNAME2, H5P_DEFAULT)) < 0)
+ if((dataset = H5Dopen2(file, DATASETNAME4, H5P_DEFAULT)) < 0)
TEST_ERROR;
/*
@@ -161,35 +286,220 @@ static int read_data(char *fname)
*/
for (j = 0; j < NX; j++) {
for (i = 0; i < NY; i++) {
- double_data_in[j][i] = ((double)(i + j + 1))/3;
- double_data_out[j][i] = 0;
+ data_in[j][i] = ((double)(i + j + 1))/3;
+ data_out[j][i] = 0;
}
}
+ for (i = 0; i < NY; i++) {
+ data_in[NX][i] = -2.2;
+ data_out[NX][i] = 0;
+ }
/*
- * Get datatype and dataspace handles and then query
- * dataset class, order, size, rank and dimensions.
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
*/
- if((dt = H5Dget_type(dataset)) < 0) /* datatype handle */
+ if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data_out) < 0)
TEST_ERROR;
- if((datatype = H5Tget_native_type(dt, H5T_DIR_DEFAULT)) < 0)
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (!DBL_REL_EQUAL(data_out[j][i], data_in[j][i], 0.001)) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %g but should have been %g\n",
+ j, i, data_out[j][i], data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of BE DOUBLE with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME5, H5P_DEFAULT)) < 0)
TEST_ERROR;
/*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ data_in[j][i] = ((double)(i + j + 1))/3;
+ data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ data_in[NX][i] = -2.2;
+ data_out[NX][i] = 0;
+ }
+
+ /*
* Read data from hyperslab in the file into the hyperslab in
* memory and display.
*/
- if(H5Dread(dataset, datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, double_data_out) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ data_out) < 0)
TEST_ERROR;
/* Check results */
- for (j=0; j<NX; j++) {
+ for (j=0; j<(NX+1); j++) {
for (i=0; i<NY; i++) {
- if (!DBL_REL_EQUAL(double_data_out[j][i], double_data_in[j][i], 0.001)) {
+ if (!DBL_REL_EQUAL(data_out[j][i], data_in[j][i], 0.001)) {
if (!nerrors++) {
H5_FAILED();
printf("element [%d][%d] is %g but should have been %g\n",
- j, i, double_data_out[j][i], double_data_in[j][i]);
+ j, i, data_out[j][i], data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of LE CHAR with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME6, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of BE CHAR with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME7, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
}
}
}
@@ -198,9 +508,8 @@ static int read_data(char *fname)
/*
* Close/release resources.
*/
- H5Tclose(dt);
- H5Tclose(datatype);
- H5Dclose(dataset);
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
/* Failure */
if (nerrors) {
@@ -209,22 +518,147 @@ static int read_data(char *fname)
}
PASSED();
+
#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
-#else /*TMP*/
+
+ TESTING("dataset of LE SHORT with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME8, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
SKIPPED();
- puts(not_fixed);
-#endif /*TMP*/
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
- TESTING(" dataset of INT with scale-offset filter");
+ TESTING("dataset of BE SHORT with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME9, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of LE INT with scale-offset filter");
#ifdef H5_HAVE_FILTER_SCALEOFFSET
/*
* Open the dataset with scale-offset filter.
*/
- if((dataset = H5Dopen2(file, DATASETNAME3, H5P_DEFAULT)) < 0)
+ if((dataset = H5Dopen2(file, DATASETNAME10, H5P_DEFAULT)) < 0)
TEST_ERROR;
/*
@@ -236,31 +670,220 @@ static int read_data(char *fname)
int_data_out[j][i] = 0;
}
}
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of BE INT with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME11, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
/*
- * Get datatype and dataspace handles and then query
- * dataset class, order, size, rank and dimensions.
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
*/
- if((dt = H5Dget_type(dataset)) < 0) /* datatype handle */
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
TEST_ERROR;
- if((datatype = H5Tget_native_type(dt, H5T_DIR_DEFAULT)) < 0)
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of LE LONG LONG with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME12, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
+
+ /*
+ * Read data from hyperslab in the file into the hyperslab in
+ * memory and display.
+ */
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
+ TEST_ERROR;
+
+ /* Check results */
+ for (j=0; j<(NX+1); j++) {
+ for (i=0; i<NY; i++) {
+ if (int_data_out[j][i] != int_data_in[j][i]) {
+ if (!nerrors++) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been %d\n",
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
+ }
+ }
+ }
+ }
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Failure */
+ if (nerrors) {
+ printf("total of %d errors out of %d elements\n", nerrors, NX*NY);
+ return 1;
+ }
+
+ PASSED();
+
+#else /*H5_HAVE_FILTER_SCALEOFFSET*/
+ SKIPPED();
+ puts(not_supported);
+#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
+
+ TESTING("dataset of BE LONG LONG with scale-offset filter");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /*
+ * Open the dataset with scale-offset filter.
+ */
+ if((dataset = H5Dopen2(file, DATASETNAME13, H5P_DEFAULT)) < 0)
TEST_ERROR;
/*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++) {
+ int_data_in[j][i] = i + j;
+ int_data_out[j][i] = 0;
+ }
+ }
+ for (i = 0; i < NY; i++) {
+ int_data_in[NX][i] = -2;
+ int_data_out[NX][i] = 0;
+ }
+
+ /*
* Read data from hyperslab in the file into the hyperslab in
* memory and display.
*/
- if(H5Dread(dataset, datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, int_data_out) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_LLONG, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ int_data_out) < 0)
TEST_ERROR;
/* Check results */
- for (j=0; j<NX; j++) {
+ for (j=0; j<(NX+1); j++) {
for (i=0; i<NY; i++) {
if (int_data_out[j][i] != int_data_in[j][i]) {
if (!nerrors++) {
H5_FAILED();
printf("element [%d][%d] is %d but should have been %d\n",
- j, i, int_data_out[j][i], int_data_in[j][i]);
+ j, i, (int)int_data_out[j][i],
+ (int)int_data_in[j][i]);
}
}
}
@@ -269,9 +892,8 @@ static int read_data(char *fname)
/*
* Close/release resources.
*/
- H5Tclose(dt);
- H5Tclose(datatype);
- H5Dclose(dataset);
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
/* Failure */
if (nerrors) {
@@ -286,7 +908,8 @@ static int read_data(char *fname)
puts(not_supported);
#endif /*H5_HAVE_FILTER_SCALEOFFSET*/
- H5Fclose(file);
+ if(H5Fclose(file))
+ TEST_ERROR
return 0;
error:
diff --git a/test/gen_cross.c b/test/gen_cross.c
index 0bc3460..32be867 100755
--- a/test/gen_cross.c
+++ b/test/gen_cross.c
@@ -28,23 +28,37 @@
#define H5FILE_NAME "data.h5"
#define DATASETNAME "Array"
-#define DATASETNAME2 "Scale_offset_double_data"
-#define DATASETNAME3 "Scale_offset_int_data"
+#define DATASETNAME2 "Scale_offset_float_data_le"
+#define DATASETNAME3 "Scale_offset_float_data_be"
+#define DATASETNAME4 "Scale_offset_double_data_le"
+#define DATASETNAME5 "Scale_offset_double_data_be"
+#define DATASETNAME6 "Scale_offset_char_data_le"
+#define DATASETNAME7 "Scale_offset_char_data_be"
+#define DATASETNAME8 "Scale_offset_short_data_le"
+#define DATASETNAME9 "Scale_offset_short_data_be"
+#define DATASETNAME10 "Scale_offset_int_data_le"
+#define DATASETNAME11 "Scale_offset_int_data_be"
+#define DATASETNAME12 "Scale_offset_long_long_data_le"
+#define DATASETNAME13 "Scale_offset_long_long_data_be"
#define NX 6
#define NY 6
#define RANK 2
-#define CHUNK0 3
+#define CHUNK0 4
#define CHUNK1 3
-int create_normal_dset(hid_t fid, hid_t sid);
-int create_scale_offset_dset_double(hid_t fid, hid_t sid);
-int create_scale_offset_dset_int(hid_t fid, hid_t sid);
+int create_normal_dset(hid_t fid, hid_t fsid, hid_t msid);
+int create_scale_offset_dsets_float(hid_t fid, hid_t fsid, hid_t msid);
+int create_scale_offset_dsets_double(hid_t fid, hid_t fsid, hid_t msid);
+int create_scale_offset_dsets_char(hid_t fid, hid_t fsid, hid_t msid);
+int create_scale_offset_dsets_short(hid_t fid, hid_t fsid, hid_t msid);
+int create_scale_offset_dsets_int(hid_t fid, hid_t fsid, hid_t msid);
+int create_scale_offset_dsets_long_long(hid_t fid, hid_t fsid, hid_t msid);
/*-------------------------------------------------------------------------
* Function: create_normal_dset
*
- * Purpose: Create a regular dataset of FLOAT datatype.
+ * Purpose: Create a regular dataset of DOUBLE datatype.
*
* Return: Success: 0
* Failure: -1
@@ -57,11 +71,12 @@ int create_scale_offset_dset_int(hid_t fid, hid_t sid);
*-------------------------------------------------------------------------
*/
int
-create_normal_dset(hid_t fid, hid_t sid)
+create_normal_dset(hid_t fid, hid_t fsid, hid_t msid)
{
hid_t dataset; /* file and dataset handles */
- herr_t status;
+ hid_t dcpl;
float data[NX][NY]; /* data to write */
+ float fillvalue = -2.2;
int i, j;
/*
@@ -78,32 +93,155 @@ create_normal_dset(hid_t fid, hid_t sid)
* 3 4 5 6 7 8
* 4 5 6 7 8 9
* 5 6 7 8 9 10
+ * -2.2 -2.2 -2.2 -2.2 -2.2 -2.2
*/
/*
+ * Create the dataset creation property list, set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_FLOAT, &fillvalue) < 0)
+ TEST_ERROR
+
+ /*
* Create a new dataset within the file using defined dataspace and
* datatype and default dataset creation properties.
*/
- dataset = H5Dcreate2(fid, DATASETNAME, H5T_NATIVE_FLOAT, sid,
- H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ if((dataset = H5Dcreate2(fid, DATASETNAME, H5T_NATIVE_DOUBLE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
/*
* Write the data to the dataset using default transfer properties.
*/
- status = H5Dwrite(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL,
- H5P_DEFAULT, data);
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
/*
* Close/release resources.
*/
- H5Dclose(dataset);
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
return 0;
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
}
/*-------------------------------------------------------------------------
- * Function: create_scale_offset_dset_double
+ * Function: create_scale_offset_dsets_float
+ *
+ * Purpose: Create a dataset of FLOAT datatype with scale-offset filter
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Neil Fortner
+ * 27 January 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+create_scale_offset_dsets_float(hid_t fid, hid_t fsid, hid_t msid)
+{
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ hid_t dataset; /* dataset handles */
+ hid_t dcpl;
+ float data[NX][NY]; /* data to write */
+ float fillvalue = -2.2;
+ hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
+ int i, j;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++)
+ data[j][i] = ((float)(i + j + 1))/3;
+ }
+
+ /*
+ * Create the dataset creation property list, add the Scale-Offset
+ * filter, set the chunk size, and set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_scaleoffset(dcpl, H5Z_SO_FLOAT_DSCALE, 3) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_FLOAT, &fillvalue) < 0)
+ TEST_ERROR
+
+ /*
+ * Create a new dataset within the file using defined dataspace, little
+ * endian datatype and default dataset creation properties.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME2, H5T_IEEE_F32LE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME3, H5T_IEEE_F32BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+
+#else /* H5_HAVE_FILTER_SCALEOFFSET */
+ const char *not_supported= "Scaleoffset filter is not enabled. Can't create the dataset.";
+
+ puts(not_supported);
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+
+ return 0;
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: create_scale_offset_dsets_double
*
* Purpose: Create a dataset of DOUBLE datatype with scale-offset filter
*
@@ -118,13 +256,13 @@ create_normal_dset(hid_t fid, hid_t sid)
*-------------------------------------------------------------------------
*/
int
-create_scale_offset_dset_double(hid_t fid, hid_t sid)
+create_scale_offset_dsets_double(hid_t fid, hid_t fsid, hid_t msid)
{
#ifdef H5_HAVE_FILTER_SCALEOFFSET
hid_t dataset; /* dataset handles */
hid_t dcpl;
- herr_t status;
- double data[NX][NY]; /* data to write */
+ double data[NX][NY]; /* data to write */
+ double fillvalue = -2.2;
hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
int i, j;
@@ -138,30 +276,265 @@ create_scale_offset_dset_double(hid_t fid, hid_t sid)
/*
* Create the dataset creation property list, add the Scale-Offset
- * filter and set the chunk size.
+ * filter, set the chunk size, and set the fill value.
*/
- dcpl = H5Pcreate (H5P_DATASET_CREATE);
- status = H5Pset_scaleoffset (dcpl, H5Z_SO_FLOAT_DSCALE, 3);
- status = H5Pset_chunk (dcpl, RANK, chunk);
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_scaleoffset(dcpl, H5Z_SO_FLOAT_DSCALE, 3) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_DOUBLE, &fillvalue) < 0)
+ TEST_ERROR
/*
- * Create a new dataset within the file using defined dataspace and
- * datatype and default dataset creation properties.
+ * Create a new dataset within the file using defined dataspace, little
+ * endian datatype and default dataset creation properties.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME4, H5T_IEEE_F64LE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ if(H5Dwrite(dataset, H5T_NATIVE_DOUBLE, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME5, H5T_IEEE_F64BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_DOUBLE, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /*
+ * Close/release resources.
+ */
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+
+#else /* H5_HAVE_FILTER_SCALEOFFSET */
+ const char *not_supported= "Scaleoffset filter is not enabled. Can't create the dataset.";
+
+ puts(not_supported);
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+
+ return 0;
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: create_scale_offset_dsets_char
+ *
+ * Purpose: Create a dataset of CHAR datatype with scale-offset filter
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Neil Fortner
+ * 27 January 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+create_scale_offset_dsets_char(hid_t fid, hid_t fsid, hid_t msid)
+{
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ hid_t dataset; /* dataset handles */
+ hid_t dcpl;
+ char data[NX][NY]; /* data to write */
+ char fillvalue = -2;
+ hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
+ int i, j;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++)
+ data[j][i] = i + j;
+ }
+ /*
+ * 0 1 2 3 4 5
+ * 1 2 3 4 5 6
+ * 2 3 4 5 6 7
+ * 3 4 5 6 7 8
+ * 4 5 6 7 8 9
+ * 5 6 7 8 9 10
+ */
+
+ /*
+ * Create the dataset creation property list, add the Scale-Offset
+ * filter, set the chunk size, and set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_scaleoffset(dcpl, H5Z_SO_INT, H5Z_SO_INT_MINBITS_DEFAULT) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_CHAR, &fillvalue) < 0)
+ TEST_ERROR
+
+ /*
+ * Create a new dataset within the file using defined dataspace, little
+ * endian datatype and default dataset creation properties.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME6, H5T_STD_I8LE, fsid, H5P_DEFAULT,
+ dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ if(H5Dwrite(dataset, H5T_NATIVE_CHAR, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME7, H5T_STD_I8BE, fsid, H5P_DEFAULT,
+ dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_CHAR, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /*
+ * Close/release resources.
*/
- dataset = H5Dcreate2(fid, DATASETNAME2, H5T_NATIVE_DOUBLE, sid,
- H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+
+#else /* H5_HAVE_FILTER_SCALEOFFSET */
+ const char *not_supported= "Scaleoffset filter is not enabled. Can't create the dataset.";
+
+ puts(not_supported);
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+
+ return 0;
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: create_scale_offset_dsets_short
+ *
+ * Purpose: Create a dataset of SHORT datatype with scale-offset filter
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Neil Fortner
+ * 27 January 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+create_scale_offset_dsets_short(hid_t fid, hid_t fsid, hid_t msid)
+{
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ hid_t dataset; /* dataset handles */
+ hid_t dcpl;
+ short data[NX][NY]; /* data to write */
+ short fillvalue = -2;
+ hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
+ int i, j;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++)
+ data[j][i] = i + j;
+ }
+ /*
+ * 0 1 2 3 4 5
+ * 1 2 3 4 5 6
+ * 2 3 4 5 6 7
+ * 3 4 5 6 7 8
+ * 4 5 6 7 8 9
+ * 5 6 7 8 9 10
+ */
+
+ /*
+ * Create the dataset creation property list, add the Scale-Offset
+ * filter, set the chunk size, and set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_scaleoffset(dcpl, H5Z_SO_INT, H5Z_SO_INT_MINBITS_DEFAULT) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_SHORT, &fillvalue) < 0)
+ TEST_ERROR
+
+ /*
+ * Create a new dataset within the file using defined dataspace, little
+ * endian datatype and default dataset creation properties.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME8, H5T_STD_I16LE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
/*
* Write the data to the dataset using default transfer properties.
*/
- status = H5Dwrite(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL,
- H5P_DEFAULT, data);
+ if(H5Dwrite(dataset, H5T_NATIVE_SHORT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME9, H5T_STD_I16BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_SHORT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
/*
* Close/release resources.
*/
- H5Pclose(dcpl);
- H5Dclose(dataset);
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
#else /* H5_HAVE_FILTER_SCALEOFFSET */
const char *not_supported= "Scaleoffset filter is not enabled. Can't create the dataset.";
@@ -170,6 +543,16 @@ create_scale_offset_dset_double(hid_t fid, hid_t sid)
#endif /* H5_HAVE_FILTER_SCALEOFFSET */
return 0;
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
}
@@ -189,13 +572,13 @@ create_scale_offset_dset_double(hid_t fid, hid_t sid)
*-------------------------------------------------------------------------
*/
int
-create_scale_offset_dset_int(hid_t fid, hid_t sid)
+create_scale_offset_dsets_int(hid_t fid, hid_t fsid, hid_t msid)
{
#ifdef H5_HAVE_FILTER_SCALEOFFSET
hid_t dataset; /* dataset handles */
hid_t dcpl;
- herr_t status;
int data[NX][NY]; /* data to write */
+ int fillvalue = -2;
hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
int i, j;
@@ -217,30 +600,158 @@ create_scale_offset_dset_int(hid_t fid, hid_t sid)
/*
* Create the dataset creation property list, add the Scale-Offset
- * filter and set the chunk size.
+ * filter, set the chunk size, and set the fill value.
*/
- dcpl = H5Pcreate (H5P_DATASET_CREATE);
- status = H5Pset_scaleoffset (dcpl, H5Z_SO_INT, H5Z_SO_INT_MINBITS_DEFAULT);
- status = H5Pset_chunk (dcpl, RANK, chunk);
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_scaleoffset(dcpl, H5Z_SO_INT, H5Z_SO_INT_MINBITS_DEFAULT) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_INT, &fillvalue) < 0)
+ TEST_ERROR
/*
- * Create a new dataset within the file using defined dataspace and
- * datatype and default dataset creation properties.
+ * Create a new dataset within the file using defined dataspace, little
+ * endian datatype and default dataset creation properties.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME10, H5T_STD_I32LE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /*
+ * Write the data to the dataset using default transfer properties.
+ */
+ if(H5Dwrite(dataset, H5T_NATIVE_INT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME11, H5T_STD_I32BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_INT, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /*
+ * Close/release resources.
*/
- dataset = H5Dcreate2(fid, DATASETNAME3, H5T_NATIVE_INT, sid,
- H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
+
+#else /* H5_HAVE_FILTER_SCALEOFFSET */
+ const char *not_supported= "Scaleoffset filter is not enabled. Can't create the dataset.";
+
+ puts(not_supported);
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+
+ return 0;
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: create_scale_offset_dsets_long_long
+ *
+ * Purpose: Create a dataset of LONG LONG datatype with scale-offset
+ * filter
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Neil Fortner
+ * 27 January 2011
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+create_scale_offset_dsets_long_long(hid_t fid, hid_t fsid, hid_t msid)
+{
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ hid_t dataset; /* dataset handles */
+ hid_t dcpl;
+ long long data[NX][NY]; /* data to write */
+ long long fillvalue = -2;
+ hsize_t chunk[RANK] = {CHUNK0, CHUNK1};
+ int i, j;
+
+ /*
+ * Data and output buffer initialization.
+ */
+ for (j = 0; j < NX; j++) {
+ for (i = 0; i < NY; i++)
+ data[j][i] = i + j;
+ }
+ /*
+ * 0 1 2 3 4 5
+ * 1 2 3 4 5 6
+ * 2 3 4 5 6 7
+ * 3 4 5 6 7 8
+ * 4 5 6 7 8 9
+ * 5 6 7 8 9 10
+ */
+
+ /*
+ * Create the dataset creation property list, add the Scale-Offset
+ * filter, set the chunk size, and set the fill value.
+ */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+ if(H5Pset_scaleoffset(dcpl, H5Z_SO_INT, H5Z_SO_INT_MINBITS_DEFAULT) < 0)
+ TEST_ERROR
+ if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
+ TEST_ERROR
+ if(H5Pset_fill_value(dcpl, H5T_NATIVE_LLONG, &fillvalue) < 0)
+ TEST_ERROR
+
+ /*
+ * Create a new dataset within the file using defined dataspace, little
+ * endian datatype and default dataset creation properties.
+ */
+ if((dataset = H5Dcreate2(fid, DATASETNAME12, H5T_STD_I64LE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
/*
* Write the data to the dataset using default transfer properties.
*/
- status = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
- H5P_DEFAULT, data);
+ if(H5Dwrite(dataset, H5T_NATIVE_LLONG, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+
+ /* Close dataset */
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
+
+ /* Now create a dataset with a big-endian type */
+ if((dataset = H5Dcreate2(fid, DATASETNAME13, H5T_STD_I64BE, fsid,
+ H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+ if(H5Dwrite(dataset, H5T_NATIVE_LLONG, msid, fsid, H5P_DEFAULT, data) < 0)
+ TEST_ERROR
+ if(H5Dclose(dataset) < 0)
+ TEST_ERROR
/*
* Close/release resources.
*/
- H5Pclose(dcpl);
- H5Dclose(dataset);
+ if(H5Pclose(dcpl) < 0)
+ TEST_ERROR
#else /* H5_HAVE_FILTER_SCALEOFFSET */
const char *not_supported= "Scaleoffset filter is not enabled. Can't create the dataset.";
@@ -249,6 +760,16 @@ create_scale_offset_dset_int(hid_t fid, hid_t sid)
#endif /* H5_HAVE_FILTER_SCALEOFFSET */
return 0;
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dataset);
+ } H5E_END_TRY;
+
+ return -1;
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
}
@@ -268,39 +789,79 @@ create_scale_offset_dset_int(hid_t fid, hid_t sid)
int
main (void)
{
- hid_t file; /* file and dataset handles */
- hid_t dataspace;
+ hid_t file = -1;
+ hid_t filespace = -1;
+ hid_t memspace = -1;
hsize_t dimsf[RANK];
+ hsize_t start[RANK] = {0, 0};
/*
* Create a new file using H5F_ACC_TRUNC access,
* default file creation properties, and default file
* access properties.
*/
- file = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ if((file = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT))
+ < 0)
+ {H5_FAILED(); AT(); return 1;}
/*
* Describe the size of the array and create the data space for fixed
- * size dataset.
+ * size dataset. Increase the size in the X direction to have some fill
+ * values.
*/
- dimsf[0] = NX;
+ dimsf[0] = NX + 1;
dimsf[1] = NY;
- dataspace = H5Screate_simple(RANK, dimsf, NULL);
+ if((filespace = H5Screate_simple(RANK, dimsf, NULL)) < 0)
+ {H5_FAILED(); AT(); return 1;}
+ dimsf[0] = NX;
+ if(H5Sselect_hyperslab(filespace, H5S_SELECT_SET, start, NULL, dimsf, NULL)
+ < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+ /* Create memory space. This does not include the extra row for fill
+ * values. */
+ HDassert(dimsf[0] == NX);
+ HDassert(dimsf[1] == NY);
+ if((memspace = H5Screate_simple(RANK, dimsf, NULL)) < 0)
+ {H5_FAILED(); AT(); return 1;}
/* Create a regular dataset */
- create_normal_dset(file, dataspace);
+ if(create_normal_dset(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+ /* Create a dataset of FLOAT with scale-offset filter */
+ if(create_scale_offset_dsets_float(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
/* Create a dataset of DOUBLE with scale-offset filter */
- create_scale_offset_dset_double(file, dataspace);
+ if(create_scale_offset_dsets_double(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+ /* Create a dataset of CHAR with scale-offset filter */
+ if(create_scale_offset_dsets_char(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+ /* Create a dataset of SHORT with scale-offset filter */
+ if(create_scale_offset_dsets_short(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
/* Create a dataset of INT with scale-offset filter */
- create_scale_offset_dset_int(file, dataspace);
+ if(create_scale_offset_dsets_int(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+
+ /* Create a dataset of LONG LONG with scale-offset filter */
+ if(create_scale_offset_dsets_long_long(file, filespace, memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
/*
* Close/release resources.
*/
- H5Sclose(dataspace);
- H5Fclose(file);
+ if(H5Sclose(memspace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+ if(H5Sclose(filespace) < 0)
+ {H5_FAILED(); AT(); return 1;}
+ if(H5Fclose(file) < 0)
+ {H5_FAILED(); AT(); return 1;}
return 0;
}
diff --git a/test/le_data.h5 b/test/le_data.h5
index 6bb0e46..1225d21 100644
--- a/test/le_data.h5
+++ b/test/le_data.h5
Binary files differ
diff --git a/test/vms_data.h5 b/test/vms_data.h5
index 5f07082..14aeef2 100644
--- a/test/vms_data.h5
+++ b/test/vms_data.h5
Binary files differ