author     Xiaowen Wu <wuxiaowe@ncsa.uiuc.edu>  2005-03-23 17:46:46 (GMT)
committer  Xiaowen Wu <wuxiaowe@ncsa.uiuc.edu>  2005-03-23 17:46:46 (GMT)
commit     24c88ddadf935c4405bada45ab23f088ee316fdc (patch)
tree       86a08f7021e3002648a3a04120ea93c9306fb5ec /test/dsets.c
parent     a25827491b8ce548c96d932ac58ed8679326ecb9 (diff)
[svn-r10385] Purpose:
    New feature.

Description:
    The scaleoffset filter is now complete for compressing integers, and all
    previously known bugs have been fixed. The filter does not compress
    floating-point data at present.

Solution:
    Update the test cases for the scaleoffset filter: two for integer types
    and one for floating-point (even though the filter does not compress it).
    The two integer test cases cover the situations where a fill value is
    defined and where it is undefined for the dataset.

Platforms tested:
    heping, copper, shanti

Misc. update:
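For orientation, the property-list sequence that the new tests exercise looks roughly like the sketch below. This is a minimal illustration only: it assumes the two-argument H5Pset_scaleoffset(dcpl, scale_factor) form and the five-argument H5Dcreate used throughout this diff, and the function name and dataset name "example_scaleoffset" are placeholders, not part of the committed tests.

    #include "hdf5.h"

    /* Minimal sketch: create a chunked integer dataset with the scaleoffset
     * filter enabled, mirroring the setup used by the tests in this diff.
     */
    static herr_t
    create_scaleoffset_dataset(hid_t file)
    {
        const hsize_t dims[2]       = {2, 5};
        const hsize_t chunk_dims[2] = {2, 5};
        hid_t         space = -1, dcpl = -1, dset = -1;

        /* Data space and dataset creation property list */
        if ((space = H5Screate_simple(2, dims, NULL)) < 0) goto error;
        if ((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;

        /* The filter applies to chunked storage only; a scale factor of 0
         * asks the library to calculate minbits itself, as in the tests. */
        if (H5Pset_chunk(dcpl, 2, chunk_dims) < 0) goto error;
        if (H5Pset_scaleoffset(dcpl, 0) < 0) goto error;

        /* "example_scaleoffset" is a placeholder dataset name */
        if ((dset = H5Dcreate(file, "example_scaleoffset", H5T_NATIVE_INT,
                              space, dcpl)) < 0) goto error;

        if (H5Dclose(dset) < 0 || H5Pclose(dcpl) < 0 || H5Sclose(space) < 0)
            goto error;
        return 0;

    error:
        return -1;
    }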
Diffstat (limited to 'test/dsets.c')
-rw-r--r--   test/dsets.c   332
1 file changed, 304 insertions(+), 28 deletions(-)
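The tests below guard their bodies with the compile-time H5_HAVE_FILTER_SCALEOFFSET macro. A caller can also check at run time whether the filter was built into the library; a small sketch, assuming the filter ID macro H5Z_FILTER_SCALEOFFSET is exported by this branch's H5Zpublic.h:

    #include "hdf5.h"

    /* Runtime check that complements the compile-time
     * H5_HAVE_FILTER_SCALEOFFSET guards used in the tests. */
    static int
    scaleoffset_available(void)
    {
        /* H5Zfilter_avail() returns an htri_t: negative on error, zero if
         * the filter is not present, positive if it is. */
        htri_t avail = H5Zfilter_avail(H5Z_FILTER_SCALEOFFSET);

        if (avail < 0)
            return -1;
        return avail ? 1 : 0;
    }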
diff --git a/test/dsets.c b/test/dsets.c
index 8646485..2085175 100644
--- a/test/dsets.c
+++ b/test/dsets.c
@@ -74,13 +74,15 @@ const char *FILENAME[] = {
#define DSET_SET_LOCAL_NAME "set_local"
#define DSET_SET_LOCAL_NAME_2 "set_local_2"
#define DSET_ONEBYTE_SHUF_NAME "onebyte_shuffle"
-#define DSET_NBIT_INT_NAME "nbit_int"
-#define DSET_NBIT_FLOAT_NAME "nbit_float"
-#define DSET_NBIT_DOUBLE_NAME "nbit_double"
-#define DSET_NBIT_ARRAY_NAME "nbit_array"
-#define DSET_NBIT_COMPOUND_NAME "nbit_compound"
-#define DSET_NBIT_COMPOUND_NAME_2 "nbit_compound_2"
-#define DSET_SCALEOFFSET_INT_NAME "scaleoffset_int"
+#define DSET_NBIT_INT_NAME "nbit_int"
+#define DSET_NBIT_FLOAT_NAME "nbit_float"
+#define DSET_NBIT_DOUBLE_NAME "nbit_double"
+#define DSET_NBIT_ARRAY_NAME "nbit_array"
+#define DSET_NBIT_COMPOUND_NAME "nbit_compound"
+#define DSET_NBIT_COMPOUND_NAME_2 "nbit_compound_2"
+#define DSET_SCALEOFFSET_INT_NAME "scaleoffset_int"
+#define DSET_SCALEOFFSET_INT_NAME_2 "scaleoffset_int_2"
+#define DSET_SCALEOFFSET_FLOAT_NAME "scaleoffset_float"
#define DSET_COMPARE_DCPL_NAME "compare_dcpl"
#define DSET_COMPARE_DCPL_NAME_2 "compare_dcpl_2"
@@ -3547,6 +3549,7 @@ error:
* Function: test_scaleoffset_int
*
* Purpose: Tests the integer datatype for scaleoffset filter
+ * with fill value not defined
*
* Return: Success: 0
*
@@ -3574,35 +3577,39 @@ test_scaleoffset_int(hid_t file)
#endif /* H5_HAVE_FILTER_SCALEOFFSET */
puts("Testing scaleoffset filter");
- TESTING(" scaleoffset int (setup)");
+ TESTING(" scaleoffset int without fill value (setup)");
#ifdef H5_HAVE_FILTER_SCALEOFFSET
datatype = H5Tcopy(H5T_NATIVE_INT);
-#if 0
+
/* Set order of dataset datatype */
- if(H5Tset_order(datatype, H5T_ORDER_BE)<0) goto error;
-#endif
+ if(H5Tset_order(datatype, H5T_ORDER_BE)<0) goto error;
+
/* Create the data space */
if ((space = H5Screate_simple(2, size, NULL))<0) goto error;
- /* Createa the dataset property list */
+ /* Create the dataset property list */
if((dc = H5Pcreate(H5P_DATASET_CREATE))<0) goto error;
- if (H5Pset_chunk(dc, 2, chunk_size)<0) goto error;
- if (H5Pset_scaleoffset(dc, 0)<0) goto error;
+
+ /* Fill value undefined */
if (H5Pset_fill_value(dc, datatype, NULL)<0) goto error;
+ /* Set up to use scaleoffset filter, let library calculate minbits */
+ if (H5Pset_chunk(dc, 2, chunk_size)<0) goto error;
+ if (H5Pset_scaleoffset(dc, 0)<0) goto error;
+
/* Create the dataset */
if ((dataset = H5Dcreate(file, DSET_SCALEOFFSET_INT_NAME, datatype,
space,dc))<0) goto error;
- /* Initialize data, assuming size of long_long >= size of int */
+ /* Initialize data */
for (i= 0;i< size[0]; i++)
for (j = 0; j < size[1]; j++) {
- orig_data[i][j] = (long_long)HDrandom() %
- 10000;
+ orig_data[i][j] = HDrandom() % 10000;
+
/* even-numbered values are negative */
if((i*size[1]+j+1)%2 == 0)
orig_data[i][j] = -orig_data[i][j];
- }
+ }
PASSED();
#else
@@ -3615,12 +3622,11 @@ test_scaleoffset_int(hid_t file)
* to it.
*----------------------------------------------------------------------
*/
- TESTING(" scaleoffset int (write)");
+ TESTING(" scaleoffset int without fill value (write)");
#ifdef H5_HAVE_FILTER_SCALEOFFSET
if (H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- orig_data)<0)
- goto error;
+ orig_data)<0) goto error;
PASSED();
#else
SKIPPED();
@@ -3631,13 +3637,12 @@ test_scaleoffset_int(hid_t file)
* STEP 2: Try to read the data we just wrote.
*----------------------------------------------------------------------
*/
- TESTING(" scaleoffset int (read)");
+ TESTING(" scaleoffset int without fill value (read)");
#ifdef H5_HAVE_FILTER_SCALEOFFSET
/* Read the dataset back */
if (H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- new_data)<0)
- goto error;
+ new_data)<0) goto error;
/* Check that the values read are the same as the values written */
for (i=0; i<size[0]; i++) {
@@ -3646,11 +3651,280 @@ test_scaleoffset_int(hid_t file)
H5_FAILED();
printf(" Read different values than written.\n");
printf(" At index %lu,%lu\n", (unsigned long)i, (unsigned long)j);
- goto error;
- }/*
- printf("orig: %d new: %d\n", orig_data[i][j], new_data[i][j]);*/
+ goto error;
+ }
+ }
+ }
+
+ /*----------------------------------------------------------------------
+ * Cleanup
+ *----------------------------------------------------------------------
+ */
+ if (H5Tclose(datatype)<0) goto error;
+ if (H5Pclose (dc)<0) goto error;
+ if (H5Dclose(dataset)<0) goto error;
+
+ PASSED();
+#else
+ SKIPPED();
+ puts(not_supported);
+#endif
+ return 0;
+error:
+ return -1;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: test_scaleoffset_int_2
+ *
+ * Purpose: Tests the integer datatype for scaleoffset filter
+ * with fill value set
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Xiaowen Wu
+ * Tuesday, March 15th, 2005
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+test_scaleoffset_int_2(hid_t file)
+{
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ hid_t dataset, datatype, space, mspace, dc;
+ const hsize_t size[2] = {2, 5};
+ const hsize_t chunk_size[2] = {2,5};
+ int orig_data[2][5];
+ int new_data[2][5];
+ hssize_t start[2]; /* Start of hyperslab */
+ hsize_t stride[2]; /* Stride of hyperslab */
+ hsize_t count[2]; /* Block count */
+ hsize_t block[2]; /* Block sizes */
+ int fillval;
+ hsize_t j;
+#else /* H5_HAVE_FILTER_SCALEOFFSET */
+ const char *not_supported= " Scaleoffset is not enabled.";
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+
+ puts("Testing scaleoffset filter");
+ TESTING(" scaleoffset int with fill value (setup)");
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ datatype = H5Tcopy(H5T_NATIVE_INT);
+
+ /* Set order of dataset datatype */
+ if(H5Tset_order(datatype, H5T_ORDER_BE)<0) goto error;
+
+ /* Create the data space for the dataset */
+ if ((space = H5Screate_simple(2, size, NULL))<0) goto error;
+
+ /* Create the dataset property list */
+ if((dc = H5Pcreate(H5P_DATASET_CREATE))<0) goto error;
+
+ /* Set fill value */
+ fillval = 12345;
+ if (H5Pset_fill_value(dc, H5T_NATIVE_INT, &fillval)<0) goto error;
+
+ /* Set up to use scaleoffset filter, let library calculate minbits */
+ if (H5Pset_chunk(dc, 2, chunk_size)<0) goto error;
+ if (H5Pset_scaleoffset(dc, 0)<0) goto error;
+
+ /* Create the dataset */
+ if ((dataset = H5Dcreate(file, DSET_SCALEOFFSET_INT_NAME_2, datatype,
+ space,dc))<0) goto error;
+
+ /* Create the memory data space */
+ if ((mspace = H5Screate_simple(2, size, NULL))<0) goto error;
+
+ /* Select the hyperslab to write: a single 1x5 block with
+ * (1,1) stride and (1,1) count, starting at position (0,0).
+ */
+ start[0] = 0; start[1] = 0;
+ stride[0] = 1; stride[1] = 1;
+ count[0] = 1; count[1] = 1;
+ block[0] = 1; block[1] = 5;
+ if(H5Sselect_hyperslab(mspace, H5S_SELECT_SET, start,
+ stride, count, block)<0) goto error;
+
+ /* Initialize data of hyperslab */
+ for (j = 0; j < size[1]; j++) {
+ orig_data[0][j] = HDrandom() % 10000;
+
+ /* even-numbered values are negative */
+ if((j+1)%2 == 0)
+ orig_data[0][j] = -orig_data[0][j];
+ }
+
+ PASSED();
+#else
+ SKIPPED();
+ puts(not_supported);
+#endif
+
+ /*----------------------------------------------------------------------
+ * STEP 1: Test scaleoffset by setting up a chunked dataset and writing
+ * to it.
+ *----------------------------------------------------------------------
+ */
+ TESTING(" scaleoffset int with fill value (write)");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /* Only data in the hyperslab is written; other values should be the fill value */
+ if (H5Dwrite(dataset, H5T_NATIVE_INT, mspace, mspace, H5P_DEFAULT,
+ orig_data)<0) goto error;
+ PASSED();
+#else
+ SKIPPED();
+ puts(not_supported);
+#endif
+
+ /*----------------------------------------------------------------------
+ * STEP 2: Try to read the data we just wrote.
+ *----------------------------------------------------------------------
+ */
+ TESTING(" scaleoffset int with fill value (read)");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /* Read the dataset back */
+ if (H5Dread(dataset, H5T_NATIVE_INT, mspace, mspace, H5P_DEFAULT,
+ new_data)<0) goto error;
+
+ /* Check that the values read are the same as the values written */
+ for (j=0; j<size[1]; j++) {
+ if (new_data[0][j] != orig_data[0][j]) {
+ H5_FAILED();
+ printf(" Read different values than written.\n");
+ printf(" At index %lu,%lu\n", 0, (unsigned long)j);
+ goto error;
}
}
+
+ /*----------------------------------------------------------------------
+ * Cleanup
+ *----------------------------------------------------------------------
+ */
+ if (H5Tclose(datatype)<0) goto error;
+ if (H5Pclose (dc)<0) goto error;
+ if (H5Dclose(dataset)<0) goto error;
+
+ PASSED();
+#else
+ SKIPPED();
+ puts(not_supported);
+#endif
+ return 0;
+error:
+ return -1;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: test_scaleoffset_float
+ *
+ * Purpose: Tests the float datatype for the scaleoffset filter; the
+ * filter does nothing to floating-point datatypes at present
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Xiaowen Wu
+ * Tuesday, Mar. 22nd, 2005
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+test_scaleoffset_float(hid_t file)
+{
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ hid_t dataset, datatype, space, dc;
+ const hsize_t size[2] = {2, 5};
+ const hsize_t chunk_size[2] = {2,5};
+ float orig_data[2][5];
+ float new_data[2][5];
+ hsize_t i, j;
+#else /* H5_HAVE_FILTER_SCALEOFFSET */
+ const char *not_supported= " Scaleoffset is not enabled.";
+#endif /* H5_HAVE_FILTER_SCALEOFFSET */
+
+ puts("Testing scaleoffset filter");
+ TESTING(" scaleoffset float (setup)");
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ datatype = H5Tcopy(H5T_NATIVE_FLOAT);
+
+ /* Set order of dataset datatype */
+ if(H5Tset_order(datatype, H5T_ORDER_BE)<0) goto error;
+
+ /* Create the data space */
+ if ((space = H5Screate_simple(2, size, NULL))<0) goto error;
+
+ /* Create the dataset property list */
+ if((dc = H5Pcreate(H5P_DATASET_CREATE))<0) goto error;
+
+ /* Set up to use scaleoffset filter, let library calculate minbits */
+ if (H5Pset_chunk(dc, 2, chunk_size)<0) goto error;
+ if (H5Pset_scaleoffset(dc, 0)<0) goto error;
+
+ /* Create the dataset */
+ if ((dataset = H5Dcreate(file, DSET_SCALEOFFSET_FLOAT_NAME, datatype,
+ space,dc))<0) goto error;
+
+ /* Initialize data */
+ for (i= 0;i< size[0]; i++)
+ for (j = 0; j < size[1]; j++)
+ orig_data[i][j] = (HDrandom() % 10000) / 1000.0;
+
+ PASSED();
+#else
+ SKIPPED();
+ puts(not_supported);
+#endif
+
+ /*----------------------------------------------------------------------
+ * STEP 1: Test scaleoffset by setting up a chunked dataset and writing
+ * to it.
+ *----------------------------------------------------------------------
+ */
+ TESTING(" scaleoffset float (write)");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ if (H5Dwrite(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ orig_data)<0) goto error;
+ PASSED();
+#else
+ SKIPPED();
+ puts(not_supported);
+#endif
+
+ /*----------------------------------------------------------------------
+ * STEP 2: Try to read the data we just wrote.
+ *----------------------------------------------------------------------
+ */
+ TESTING(" scaleoffset float (read)");
+
+#ifdef H5_HAVE_FILTER_SCALEOFFSET
+ /* Read the dataset back */
+ if (H5Dread(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
+ new_data)<0) goto error;
+
+ /* Check that the values read are the same as the values written */
+ for (i=0; i<size[0]; i++) {
+ for (j=0; j<size[1]; j++) {
+ if (new_data[i][j] != orig_data[i][j]) {
+ H5_FAILED();
+ printf(" Read different values than written.\n");
+ printf(" At index %lu,%lu\n", (unsigned long)i, (unsigned long)j);
+ goto error;
+ }
+ }
+ }
+
/*----------------------------------------------------------------------
* Cleanup
*----------------------------------------------------------------------
@@ -5107,7 +5381,9 @@ int main(void)
nerrors += test_nbit_array(file)<0 ?1:0;
nerrors += test_nbit_compound(file)<0 ?1:0;
nerrors += test_nbit_compound_2(file)<0 ?1:0;
-/* nerrors += test_scaleoffset_int(file)<0 ?1:0; */
+ nerrors += test_scaleoffset_int(file)<0 ?1:0;
+ nerrors += test_scaleoffset_int_2(file)<0 ?1:0;
+ nerrors += test_scaleoffset_float(file)<0 ?1:0;
nerrors += test_multiopen (file)<0 ?1:0;
nerrors += test_types(file)<0 ?1:0;
nerrors += test_userblock_offset(fapl)<0 ?1:0;
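As a side note on test_scaleoffset_int_2: only one 1x5 hyperslab is written, so when the full dataset is read back the elements of the unwritten row should come back as the fill value. A small verification sketch along those lines is given below; the helper check_fill_row is hypothetical and not part of this commit, and "dataset" and "fillval" simply mirror the names used in that test.

    /* Sketch: confirm that the row outside the written hyperslab reads back
     * as the fill value set on the dataset creation property list. */
    static herr_t
    check_fill_row(hid_t dataset, int fillval)
    {
        int     buf[2][5];
        hsize_t j;

        /* Read the whole 2x5 dataset, including the row that was never written */
        if (H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                    buf) < 0)
            return -1;

        /* Row 1 lies outside the written hyperslab, so it should hold fillval */
        for (j = 0; j < 5; j++)
            if (buf[1][j] != fillval)
                return -1;

        return 0;
    }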