author    Dana Robinson <derobins@hdfgroup.org>    2015-02-27 12:50:38 (GMT)
committer Dana Robinson <derobins@hdfgroup.org>    2015-02-27 12:50:38 (GMT)
commit    7462a7ef1c3542b8812c8da86e79b15ce22d21cb (patch)
tree      7f231cb842e7d2f17f25d6b84c7a0fa07c0496dc /test/dsets.c
parent    7b7f0eafc57c083298d87a09df70a33f93e4760d (diff)
[svn-r26329] Merge of r26069, r26070, and r26072 from the features/autotools_rework branch.

Removed the configure option that allows selective disabling of individual internal filters (fletcher32, shuffle, etc.). This feature mucked up the code with a lot of #ifdefs, saved very little space, and was not scalable to a general scheme for library size reduction. We've decided to remove the feature while we investigate a more general scheme for decreasing the library size.

Part of: HDFFV-9086
Tested on: h5committest
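
As background for the change (a minimal sketch, not code from this commit): with the compile-time #ifdef gating gone, availability of an optional filter such as szip becomes a runtime question, answerable with the standard H5Zfilter_avail() API. The filter choice and flow below are illustrative only.

    #include <stdio.h>
    #include "hdf5.h"

    int main(void)
    {
        /* H5Zfilter_avail() returns a positive value if the filter is
         * present in the linked library, 0 if it is not, and a negative
         * value on error -- so a test can skip at runtime instead of
         * being compiled out. */
        htri_t avail = H5Zfilter_avail(H5Z_FILTER_SZIP);

        if (avail > 0)
            printf("szip filter available\n");
        else if (avail == 0)
            printf("szip filter not available; skipping szip tests\n");
        else
            fprintf(stderr, "H5Zfilter_avail() failed\n");

        return 0;
    }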
Diffstat (limited to 'test/dsets.c')
-rw-r--r--   test/dsets.c   364
1 file changed, 39 insertions(+), 325 deletions(-)
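
The first hunks below drop the #ifdef guards around the H5Zget_filter_info() checks for the always-built fletcher32 and shuffle filters. A hedged sketch of that query follows (the API is standard HDF5; the surrounding main() is illustrative, not the actual test harness):

    #include <stdio.h>
    #include "hdf5.h"

    int main(void)
    {
        unsigned int flags = 0;

        /* Ask the library which operations the fletcher32 filter supports. */
        if (H5Zget_filter_info(H5Z_FILTER_FLETCHER32, &flags) < 0)
            return 1;

        if ((flags & H5Z_FILTER_CONFIG_ENCODE_ENABLED) &&
            (flags & H5Z_FILTER_CONFIG_DECODE_ENABLED))
            printf("fletcher32: encoder and decoder enabled\n");

        return 0;
    }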
diff --git a/test/dsets.c b/test/dsets.c
index 3055943..c782e70 100644
--- a/test/dsets.c
+++ b/test/dsets.c
@@ -70,19 +70,17 @@ const char *FILENAME[] = {
#define DSET_CONV_BUF_NAME "conv_buf"
#define DSET_TCONV_NAME "tconv"
#define DSET_DEFLATE_NAME "deflate"
-#ifdef H5_HAVE_FILTER_SZIP
-#define DSET_SZIP_NAME "szip"
-#endif /* H5_HAVE_FILTER_SZIP */
#define DSET_SHUFFLE_NAME "shuffle"
#define DSET_FLETCHER32_NAME "fletcher32"
#define DSET_FLETCHER32_NAME_2 "fletcher32_2"
#define DSET_FLETCHER32_NAME_3 "fletcher32_3"
#define DSET_SHUF_DEF_FLET_NAME "shuffle+deflate+fletcher32"
#define DSET_SHUF_DEF_FLET_NAME_2 "shuffle+deflate+fletcher32_2"
-#if defined H5_HAVE_FILTER_SZIP && defined H5_HAVE_FILTER_SHUFFLE && defined H5_HAVE_FILTER_FLETCHER32
+#ifdef H5_HAVE_FILTER_SZIP
+#define DSET_SZIP_NAME "szip"
#define DSET_SHUF_SZIP_FLET_NAME "shuffle+szip+fletcher32"
#define DSET_SHUF_SZIP_FLET_NAME_2 "shuffle+szip+fletcher32_2"
-#endif /* defined H5_HAVE_FILTER_SZIP && defined H5_HAVE_FILTER_SHUFFLE && defined H5_HAVE_FILTER_FLETCHER32 */
+#endif /* H5_HAVE_FILTER_SZIP */
#define DSET_BOGUS_NAME "bogus"
#define DSET_MISSING_NAME "missing"
@@ -2006,21 +2004,17 @@ test_get_filter_info(void)
/* Verify that each filter is reported as having the right combination
* of encoder and decoder.
*/
-#ifdef H5_HAVE_FILTER_FLETCHER32
if(H5Zget_filter_info(H5Z_FILTER_FLETCHER32, &flags) < 0) TEST_ERROR
if(((flags & H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
((flags & H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0))
TEST_ERROR
-#endif
-#ifdef H5_HAVE_FILTER_SHUFFLE
if(H5Zget_filter_info(H5Z_FILTER_SHUFFLE, &flags) < 0) TEST_ERROR
if(((flags & H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
((flags & H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0))
TEST_ERROR
-#endif
#ifdef H5_HAVE_FILTER_DEFLATE
if(H5Zget_filter_info(H5Z_FILTER_DEFLATE, &flags) < 0) TEST_ERROR
@@ -2083,10 +2077,8 @@ UNUSED
const hsize_t chunk_size[2] = {FILTER_CHUNK_DIM1, FILTER_CHUNK_DIM2}; /* Chunk dimensions */
hsize_t null_size; /* Size of dataset with null filter */
-#ifdef H5_HAVE_FILTER_FLETCHER32
hsize_t fletcher32_size; /* Size of dataset with Fletcher32 checksum */
unsigned data_corrupt[3]; /* position and length of data to be corrupted */
-#endif /* H5_HAVE_FILTER_FLETCHER32 */
#ifdef H5_HAVE_FILTER_DEFLATE
hsize_t deflate_size; /* Size of dataset with deflate filter */
@@ -2098,13 +2090,11 @@ UNUSED
unsigned szip_pixels_per_block=4;
#endif /* H5_HAVE_FILTER_SZIP */
-#ifdef H5_HAVE_FILTER_SHUFFLE
hsize_t shuffle_size; /* Size of dataset with shuffle filter */
-#endif /* H5_HAVE_FILTER_SHUFFLE */
-#if(defined H5_HAVE_FILTER_DEFLATE | defined H5_HAVE_FILTER_SZIP) && defined H5_HAVE_FILTER_SHUFFLE && defined H5_HAVE_FILTER_FLETCHER32
- hsize_t combo_size; /* Size of dataset with shuffle+deflate filter */
-#endif /* H5_HAVE_FILTER_DEFLATE && H5_HAVE_FILTER_SHUFFLE && H5_HAVE_FILTER_FLETCHER32 */
+#if(defined H5_HAVE_FILTER_DEFLATE | defined H5_HAVE_FILTER_SZIP)
+ hsize_t combo_size; /* Size of dataset with multiple filters */
+#endif /* defined H5_HAVE_FILTER_DEFLATE | defined H5_HAVE_FILTER_SZIP */
/* test the H5Zget_filter_info function */
if(test_get_filter_info() < 0) goto error;
@@ -2128,7 +2118,6 @@ UNUSED
* STEP 1: Test Fletcher32 Checksum by itself.
*----------------------------------------------------------
*/
-#ifdef H5_HAVE_FILTER_FLETCHER32
puts("Testing Fletcher32 checksum(enabled for read)");
if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
@@ -2169,11 +2158,6 @@ UNUSED
/* Clean up objects used for this test */
if(H5Pclose (dc) < 0) goto error;
-#else /* H5_HAVE_FILTER_FLETCHER32 */
- TESTING("fletcher32 checksum");
- SKIPPED();
- puts(" Fletcher32 checksum not enabled");
-#endif /* H5_HAVE_FILTER_FLETCHER32 */
/*----------------------------------------------------------
* STEP 2: Test deflation by itself.
@@ -2231,7 +2215,6 @@ UNUSED
* STEP 4: Test shuffling by itself.
*----------------------------------------------------------
*/
-#ifdef H5_HAVE_FILTER_SHUFFLE
puts("Testing shuffle filter");
if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
@@ -2246,17 +2229,12 @@ UNUSED
/* Clean up objects used for this test */
if(H5Pclose (dc) < 0) goto error;
-#else /* H5_HAVE_FILTER_SHUFFLE */
- TESTING("shuffle filter");
- SKIPPED();
- puts(" Shuffle filter not enabled");
-#endif /* H5_HAVE_FILTER_SHUFFLE */
/*----------------------------------------------------------
* STEP 5: Test shuffle + deflate + checksum in any order.
*----------------------------------------------------------
*/
-#if defined H5_HAVE_FILTER_DEFLATE && defined H5_HAVE_FILTER_SHUFFLE && defined H5_HAVE_FILTER_FLETCHER32
+#ifdef H5_HAVE_FILTER_DEFLATE
puts("Testing shuffle+deflate+checksum filters(checksum first)");
if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
@@ -2280,17 +2258,17 @@ UNUSED
/* Clean up objects used for this test */
if(H5Pclose (dc) < 0) goto error;
-#else /* H5_HAVE_FILTER_DEFLATE && H5_HAVE_FILTER_SHUFFLE && H5_HAVE_FILTER_FLETCHER32 */
+#else /* H5_HAVE_FILTER_DEFLATE */
TESTING("shuffle+deflate+fletcher32 filters");
SKIPPED();
- puts(" Deflate, shuffle, or fletcher32 checksum filter not enabled");
-#endif /* H5_HAVE_FILTER_DEFLATE && H5_HAVE_FILTER_SHUFFLE && H5_HAVE_FILTER_FLETCHER32 */
+ puts(" Deflate filter not enabled");
+#endif /* H5_HAVE_FILTER_DEFLATE */
/*----------------------------------------------------------
* STEP 6: Test shuffle + szip + checksum in any order.
*----------------------------------------------------------
*/
-#if defined H5_HAVE_FILTER_SZIP && defined H5_HAVE_FILTER_SHUFFLE && defined H5_HAVE_FILTER_FLETCHER32
+#ifdef H5_HAVE_FILTER_SZIP
TESTING("shuffle+szip+checksum filters(checksum first, with encoder)");
if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
@@ -2339,11 +2317,11 @@ UNUSED
SKIPPED();
}
-#else /* H5_HAVE_FILTER_SZIP && H5_HAVE_FILTER_SHUFFLE && H5_HAVE_FILTER_FLETCHER32 */
+#else /* H5_HAVE_FILTER_SZIP */
TESTING("shuffle+szip+fletcher32 filters");
SKIPPED();
- puts(" Szip, shuffle, or fletcher32 checksum filter not enabled");
-#endif /* H5_HAVE_FILTER_SZIP && H5_HAVE_FILTER_SHUFFLE && H5_HAVE_FILTER_FLETCHER32 */
+ puts(" szip filter not enabled");
+#endif /* H5_HAVE_FILTER_SZIP */
return 0;
error:
@@ -2610,20 +2588,15 @@ error:
static herr_t
test_onebyte_shuffle(hid_t file)
{
-#ifdef H5_HAVE_FILTER_SHUFFLE
hid_t dataset, space,dc;
const hsize_t size[2] = {10, 20};
const hsize_t chunk_size[2] = {10, 20};
unsigned char orig_data[10][20];
unsigned char new_data[10][20];
size_t i, j;
-#else /* H5_HAVE_FILTER_SHUFFLE */
- const char *not_supported= " Data shuffling is not enabled.";
-#endif /* H5_HAVE_FILTER_SHUFFLE */
TESTING("8-bit shuffling (setup)");
-#ifdef H5_HAVE_FILTER_SHUFFLE
/* Create the data space */
if((space = H5Screate_simple(2, size, NULL)) < 0) goto error;
@@ -2641,10 +2614,6 @@ test_onebyte_shuffle(hid_t file)
orig_data[i][j] = (unsigned char)HDrandom();
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 1: Test shuffling by setting up a chunked dataset and writing
@@ -2653,16 +2622,11 @@ test_onebyte_shuffle(hid_t file)
*/
TESTING("8-bit shuffling (write)");
-#ifdef H5_HAVE_FILTER_SHUFFLE
if(H5Dwrite(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT,
orig_data) < 0)
goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 2: Try to read the data we just wrote.
@@ -2670,7 +2634,6 @@ test_onebyte_shuffle(hid_t file)
*/
TESTING("8-bit shuffling (read)");
-#ifdef H5_HAVE_FILTER_SHUFFLE
/* Read the dataset back */
if(H5Dread(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT,
new_data) < 0)
@@ -2697,10 +2660,6 @@ test_onebyte_shuffle(hid_t file)
if(H5Dclose(dataset) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
return 0;
@@ -2726,7 +2685,6 @@ error:
static herr_t
test_nbit_int(hid_t file)
{
-#ifdef H5_HAVE_FILTER_NBIT
hid_t dataset, datatype, mem_datatype, space, dc;
hsize_t size[2] = {2, 5};
hsize_t chunk_size[2] = {2,5};
@@ -2735,13 +2693,10 @@ test_nbit_int(hid_t file)
unsigned int mask;
size_t precision, offset;
size_t i, j;
-#else /* H5_HAVE_FILTER_NBIT */
- const char *not_supported= " Nbit is not enabled.";
-#endif /* H5_HAVE_FILTER_NBIT */
puts("Testing nbit filter");
TESTING(" nbit int (setup)");
-#ifdef H5_HAVE_FILTER_NBIT
+
/* Define dataset datatype (integer), and set precision, offset */
datatype = H5Tcopy(H5T_NATIVE_INT);
precision = 17; /* precision includes sign bit */
@@ -2779,10 +2734,6 @@ test_nbit_int(hid_t file)
}
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 1: Test nbit by setting up a chunked dataset and writing
@@ -2791,15 +2742,10 @@ test_nbit_int(hid_t file)
*/
TESTING(" nbit int (write)");
-#ifdef H5_HAVE_FILTER_NBIT
if(H5Dwrite(dataset, mem_datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT,
orig_data) < 0)
goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 2: Try to read the data we just wrote.
@@ -2807,7 +2753,6 @@ test_nbit_int(hid_t file)
*/
TESTING(" nbit int (read)");
-#ifdef H5_HAVE_FILTER_NBIT
/* Read the dataset back */
if(H5Dread(dataset, mem_datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT,
new_data) < 0)
@@ -2839,10 +2784,7 @@ test_nbit_int(hid_t file)
if(H5Dclose(dataset) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
+
return 0;
error:
return -1;
@@ -2866,7 +2808,6 @@ error:
static herr_t
test_nbit_float(hid_t file)
{
-#ifdef H5_HAVE_FILTER_NBIT
hid_t dataset, datatype, space, dc;
const hsize_t size[2] = {2, 5};
const hsize_t chunk_size[2] = {2, 5};
@@ -2878,12 +2819,9 @@ test_nbit_float(hid_t file)
float new_data[2][5];
size_t precision, offset;
size_t i, j;
-#else /* H5_HAVE_FILTER_NBIT */
- const char *not_supported= " Nbit is not enabled.";
-#endif /* H5_HAVE_FILTER_NBIT */
TESTING(" nbit float (setup)");
-#ifdef H5_HAVE_FILTER_NBIT
+
/* Define user-defined single-precision floating-point type for dataset */
datatype = H5Tcopy(H5T_IEEE_F32BE);
if(H5Tset_fields(datatype, (size_t)26, (size_t)20, (size_t)6, (size_t)7, (size_t)13) < 0) goto error;
@@ -2906,10 +2844,6 @@ test_nbit_float(hid_t file)
if((dataset = H5Dcreate2(file, DSET_NBIT_FLOAT_NAME, datatype,
space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 1: Test nbit by setting up a chunked dataset and writing
@@ -2918,16 +2852,11 @@ test_nbit_float(hid_t file)
*/
TESTING(" nbit float (write)");
-#ifdef H5_HAVE_FILTER_NBIT
if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
orig_data) < 0)
goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 2: Try to read the data we just wrote.
@@ -2935,7 +2864,6 @@ test_nbit_float(hid_t file)
*/
TESTING(" nbit float (read)");
-#ifdef H5_HAVE_FILTER_NBIT
/* Read the dataset back */
if(H5Dread(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
new_data) < 0)
@@ -2966,10 +2894,6 @@ test_nbit_float(hid_t file)
if(H5Dclose(dataset) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
return 0;
@@ -2996,7 +2920,6 @@ static herr_t
test_nbit_double(hid_t file)
{
/* assume unsigned int and float has the same number of bytes */
-#ifdef H5_HAVE_FILTER_NBIT
hid_t dataset, datatype, space, dc;
const hsize_t size[2] = {2, 5};
const hsize_t chunk_size[2] = {2, 5};
@@ -3009,12 +2932,9 @@ test_nbit_double(hid_t file)
double new_data[2][5];
size_t precision, offset;
size_t i, j;
-#else /* H5_HAVE_FILTER_NBIT */
- const char *not_supported= " Nbit is not enabled.";
-#endif /* H5_HAVE_FILTER_NBIT */
TESTING(" nbit double (setup)");
-#ifdef H5_HAVE_FILTER_NBIT
+
/* Define user-defined doule-precision floating-point type for dataset */
datatype = H5Tcopy(H5T_IEEE_F64BE);
if(H5Tset_fields(datatype, (size_t)55, (size_t)46, (size_t)9, (size_t)5, (size_t)41) < 0) goto error;
@@ -3038,10 +2958,6 @@ test_nbit_double(hid_t file)
space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 1: Test nbit by setting up a chunked dataset and writing
@@ -3050,15 +2966,10 @@ test_nbit_double(hid_t file)
*/
TESTING(" nbit double (write)");
-#ifdef H5_HAVE_FILTER_NBIT
if(H5Dwrite(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
orig_data) < 0)
goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 2: Try to read the data we just wrote.
@@ -3066,7 +2977,6 @@ test_nbit_double(hid_t file)
*/
TESTING(" nbit double (read)");
-#ifdef H5_HAVE_FILTER_NBIT
/* Read the dataset back */
if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
new_data) < 0)
@@ -3097,10 +3007,6 @@ test_nbit_double(hid_t file)
if(H5Dclose(dataset) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
return 0;
@@ -3126,7 +3032,6 @@ error:
static herr_t
test_nbit_array(hid_t file)
{
-#ifdef H5_HAVE_FILTER_NBIT
hid_t dataset, base_datatype, array_datatype, space, dc;
hid_t mem_base_datatype, mem_array_datatype;
const hsize_t size[2] = {2, 5};
@@ -3136,12 +3041,9 @@ test_nbit_array(hid_t file)
unsigned int new_data[2][5][3][2];
size_t precision, offset;
size_t i, j, m, n;
-#else /* H5_HAVE_FILTER_NBIT */
- const char *not_supported= " Nbit is not enabled.";
-#endif /* H5_HAVE_FILTER_NBIT */
TESTING(" nbit array (setup)");
-#ifdef H5_HAVE_FILTER_NBIT
+
/* Define dataset array datatype's base datatype and set precision, offset */
base_datatype = H5Tcopy(H5T_NATIVE_UINT);
precision = 22;
@@ -3181,10 +3083,6 @@ test_nbit_array(hid_t file)
orig_data[i][j][m][n] = (unsigned int)(((long long)HDrandom() %
(long long)HDpow(2.0, (double)precision)) << offset);
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 1: Test nbit by setting up a chunked dataset and writing
@@ -3193,16 +3091,11 @@ test_nbit_array(hid_t file)
*/
TESTING(" nbit array (write)");
-#ifdef H5_HAVE_FILTER_NBIT
if(H5Dwrite(dataset, mem_array_datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT,
orig_data) < 0)
goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 2: Try to read the data we just wrote.
@@ -3210,7 +3103,6 @@ test_nbit_array(hid_t file)
*/
TESTING(" nbit array (read)");
-#ifdef H5_HAVE_FILTER_NBIT
/* Read the dataset back */
if(H5Dread(dataset, mem_array_datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT,
new_data) < 0)
@@ -3244,10 +3136,7 @@ test_nbit_array(hid_t file)
if(H5Dclose(dataset) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
+
return 0;
error:
@@ -3272,7 +3161,6 @@ error:
static herr_t
test_nbit_compound(hid_t file)
{
-#ifdef H5_HAVE_FILTER_NBIT
typedef struct { /* Struct with atomic fields */
int i;
char c;
@@ -3294,12 +3182,9 @@ test_nbit_compound(hid_t file)
unsigned int i_mask, s_mask, c_mask;
size_t i, j;
-#else /* H5_HAVE_FILTER_NBIT */
- const char *not_supported= " Nbit is not enabled.";
-#endif /* H5_HAVE_FILTER_NBIT */
TESTING(" nbit compound (setup)");
-#ifdef H5_HAVE_FILTER_NBIT
+
/* Define datatypes of members of compound datatype */
i_tid=H5Tcopy(H5T_NATIVE_INT);
c_tid=H5Tcopy(H5T_NATIVE_CHAR);
@@ -3370,10 +3255,6 @@ test_nbit_compound(hid_t file)
}
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 1: Test nbit by setting up a chunked dataset and writing
@@ -3382,15 +3263,10 @@ test_nbit_compound(hid_t file)
*/
TESTING(" nbit compound (write)");
-#ifdef H5_HAVE_FILTER_NBIT
if(H5Dwrite(dataset, mem_cmpd_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT,
orig_data) < 0)
goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 2: Try to read the data we just wrote.
@@ -3398,7 +3274,6 @@ test_nbit_compound(hid_t file)
*/
TESTING(" nbit compound (read)");
-#ifdef H5_HAVE_FILTER_NBIT
/* Read the dataset back */
if(H5Dread(dataset, mem_cmpd_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT,
new_data) < 0)
@@ -3440,10 +3315,7 @@ test_nbit_compound(hid_t file)
if(H5Dclose(dataset) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
+
return 0;
error:
@@ -3468,7 +3340,6 @@ error:
static herr_t
test_nbit_compound_2(hid_t file)
{
-#ifdef H5_HAVE_FILTER_NBIT
typedef struct { /* Struct with atomic fields */
int i;
char c;
@@ -3505,12 +3376,9 @@ test_nbit_compound_2(hid_t file)
unsigned int i_mask, s_mask, c_mask, b_mask;
size_t i, j, m, n, b_failed, d_failed;
-#else /* H5_HAVE_FILTER_NBIT */
- const char *not_supported= " Nbit is not enabled.";
-#endif /* H5_HAVE_FILTER_NBIT */
TESTING(" nbit compound complex (setup)");
-#ifdef H5_HAVE_FILTER_NBIT
+
/* Define datatypes of members of compound datatype */
i_tid=H5Tcopy(H5T_NATIVE_INT);
c_tid=H5Tcopy(H5T_NATIVE_CHAR);
@@ -3626,10 +3494,6 @@ test_nbit_compound_2(hid_t file)
}
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 1: Test nbit by setting up a chunked dataset and writing
@@ -3638,15 +3502,10 @@ test_nbit_compound_2(hid_t file)
*/
TESTING(" nbit compound complex (write)");
-#ifdef H5_HAVE_FILTER_NBIT
if(H5Dwrite(dataset, mem_cmpd_tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT,
orig_data) < 0)
goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 2: Try to read the data we just wrote.
@@ -3654,7 +3513,6 @@ test_nbit_compound_2(hid_t file)
*/
TESTING(" nbit compound complex (read)");
-#ifdef H5_HAVE_FILTER_NBIT
/* Read the dataset back */
if(H5Dread(dataset, mem_cmpd_tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT,
new_data) < 0)
@@ -3738,10 +3596,7 @@ test_nbit_compound_2(hid_t file)
if(H5Dclose(dataset) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
+
return 0;
error:
@@ -3766,7 +3621,6 @@ error:
static herr_t
test_nbit_compound_3(hid_t file)
{
-#ifdef H5_HAVE_FILTER_NBIT
typedef struct { /* Struct with some no-op type fields */
int i; /* integer field, NOT a no-op type */
char str[30]; /* fixed-length string, no-op type */
@@ -3784,12 +3638,8 @@ test_nbit_compound_3(hid_t file)
atomic new_data[5];
size_t i, k, j;
-#else /* H5_HAVE_FILTER_NBIT */
- const char *not_supported= " Nbit is not enabled.";
-#endif /* H5_HAVE_FILTER_NBIT */
TESTING(" nbit compound with no-op type (setup)");
-#ifdef H5_HAVE_FILTER_NBIT
/* Define datatypes of members of compound datatype */
i_tid=H5Tcopy(H5T_NATIVE_INT);
@@ -3849,10 +3699,6 @@ test_nbit_compound_3(hid_t file)
}
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 1: Test nbit by setting up a chunked dataset and writing
@@ -3861,15 +3707,10 @@ test_nbit_compound_3(hid_t file)
*/
TESTING(" nbit compound with no-op type (write)");
-#ifdef H5_HAVE_FILTER_NBIT
if(H5Dwrite(dataset, cmpd_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT,
orig_data) < 0)
goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 2: Try to read the data we just wrote.
@@ -3877,7 +3718,6 @@ test_nbit_compound_3(hid_t file)
*/
TESTING(" nbit compound with no-op type (read)");
-#ifdef H5_HAVE_FILTER_NBIT
/* Read the dataset back */
if(H5Dread(dataset, cmpd_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT,
new_data) < 0)
@@ -3934,10 +3774,7 @@ test_nbit_compound_3(hid_t file)
if(H5Dclose(dataset) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
+
return 0;
error:
@@ -3962,19 +3799,14 @@ error:
static herr_t
test_nbit_int_size(hid_t file)
{
-#ifdef H5_HAVE_FILTER_NBIT
hid_t dataspace, dataset, datatype, mem_datatype, dset_create_props;
hsize_t dims[2], chunk_size[2];
hsize_t dset_size = 0;
int orig_data[DSET_DIM1][DSET_DIM2];
int i, j;
size_t precision, offset;
-#else /* H5_HAVE_FILTER_NBIT */
- const char *not_supported= " Nbit is not enabled.";
-#endif /* H5_HAVE_FILTER_NBIT */
TESTING(" nbit integer dataset size");
-#ifdef H5_HAVE_FILTER_NBIT
/* Define dataset datatype (integer), and set precision, offset */
if((datatype = H5Tcopy(H5T_NATIVE_INT)) < 0) {
@@ -4107,10 +3939,6 @@ test_nbit_int_size(hid_t file)
H5Pclose (dset_create_props);
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
return 0;
error:
@@ -4136,7 +3964,6 @@ error:
static herr_t
test_nbit_flt_size(hid_t file)
{
-#ifdef H5_HAVE_FILTER_NBIT
hid_t dataspace, dataset, datatype, dset_create_props;
hsize_t dims[2], chunk_size[2];
hsize_t dset_size = 0;
@@ -4144,12 +3971,8 @@ test_nbit_flt_size(hid_t file)
int i, j;
size_t precision, offset;
size_t spos, epos, esize, mpos, msize;
-#else /* H5_HAVE_FILTER_NBIT */
- const char *not_supported= " Nbit is not enabled.";
-#endif /* H5_HAVE_FILTER_NBIT */
TESTING(" nbit floating-number dataset size");
-#ifdef H5_HAVE_FILTER_NBIT
/* Define floating-point type for dataset
*-------------------------------------------------------------------
@@ -4313,10 +4136,6 @@ test_nbit_flt_size(hid_t file)
H5Pclose (dset_create_props);
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
return 0;
error:
@@ -4341,20 +4160,16 @@ error:
static herr_t
test_scaleoffset_int(hid_t file)
{
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
hid_t dataset, datatype, space, dc;
const hsize_t size[2] = {2, 5};
const hsize_t chunk_size[2] = {2,5};
int orig_data[2][5];
int new_data[2][5];
size_t i, j;
-#else /* H5_HAVE_FILTER_SCALEOFFSET */
- const char *not_supported= " Scaleoffset is not enabled.";
-#endif /* H5_HAVE_FILTER_SCALEOFFSET */
puts("Testing scaleoffset filter");
TESTING(" scaleoffset int without fill value (setup)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
+
datatype = H5Tcopy(H5T_NATIVE_INT);
/* Set order of dataset datatype */
@@ -4388,10 +4203,6 @@ test_scaleoffset_int(hid_t file)
}
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 1: Test scaleoffset by setting up a chunked dataset and writing
@@ -4400,14 +4211,9 @@ test_scaleoffset_int(hid_t file)
*/
TESTING(" scaleoffset int without fill value (write)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
orig_data) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 2: Try to read the data we just wrote.
@@ -4415,7 +4221,6 @@ test_scaleoffset_int(hid_t file)
*/
TESTING(" scaleoffset int without fill value (read)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
/* Read the dataset back */
if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
new_data) < 0) goto error;
@@ -4442,10 +4247,7 @@ test_scaleoffset_int(hid_t file)
if(H5Dclose(dataset) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
+
return 0;
error:
return -1;
@@ -4470,7 +4272,6 @@ error:
static herr_t
test_scaleoffset_int_2(hid_t file)
{
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
hid_t dataset, datatype, space, mspace, dc;
const hsize_t size[2] = {2, 5};
const hsize_t chunk_size[2] = {2,5};
@@ -4482,12 +4283,9 @@ test_scaleoffset_int_2(hid_t file)
hsize_t block[2]; /* Block sizes */
int fillval;
size_t j;
-#else /* H5_HAVE_FILTER_SCALEOFFSET */
- const char *not_supported= " Scaleoffset is not enabled.";
-#endif /* H5_HAVE_FILTER_SCALEOFFSET */
TESTING(" scaleoffset int with fill value (setup)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
+
datatype = H5Tcopy(H5T_NATIVE_INT);
/* Set order of dataset datatype */
@@ -4534,10 +4332,6 @@ test_scaleoffset_int_2(hid_t file)
}
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 1: Test scaleoffset by setting up a chunked dataset and writing
@@ -4546,15 +4340,10 @@ test_scaleoffset_int_2(hid_t file)
*/
TESTING(" scaleoffset int with fill value (write)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
/* only data in the hyperslab will be written, other value should be fill value */
if(H5Dwrite(dataset, H5T_NATIVE_INT, mspace, mspace, H5P_DEFAULT,
orig_data) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 2: Try to read the data we just wrote.
@@ -4562,7 +4351,6 @@ test_scaleoffset_int_2(hid_t file)
*/
TESTING(" scaleoffset int with fill value (read)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
/* Read the dataset back */
if(H5Dread(dataset, H5T_NATIVE_INT, mspace, mspace, H5P_DEFAULT,
new_data) < 0) goto error;
@@ -4587,10 +4375,7 @@ test_scaleoffset_int_2(hid_t file)
if(H5Dclose(dataset) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
+
return 0;
error:
return -1;
@@ -4615,19 +4400,15 @@ error:
static herr_t
test_scaleoffset_float(hid_t file)
{
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
hid_t dataset, datatype, space, dc;
const hsize_t size[2] = {2, 5};
const hsize_t chunk_size[2] = {2,5};
float orig_data[2][5];
float new_data[2][5];
size_t i, j;
-#else /* H5_HAVE_FILTER_SCALEOFFSET */
- const char *not_supported= " Scaleoffset is not enabled.";
-#endif /* H5_HAVE_FILTER_SCALEOFFSET */
TESTING(" scaleoffset float without fill value, D-scaling (setup)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
+
datatype = H5Tcopy(H5T_NATIVE_FLOAT);
/* Set order of dataset datatype */
@@ -4663,10 +4444,6 @@ test_scaleoffset_float(hid_t file)
}
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 1: Test scaleoffset by setting up a chunked dataset and writing
@@ -4675,14 +4452,9 @@ test_scaleoffset_float(hid_t file)
*/
TESTING(" scaleoffset float without fill value, D-scaling (write)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
orig_data) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 2: Try to read the data we just wrote.
@@ -4690,7 +4462,6 @@ test_scaleoffset_float(hid_t file)
*/
TESTING(" scaleoffset float without fill value, D-scaling (read)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
/* Read the dataset back */
if(H5Dread(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
new_data) < 0) goto error;
@@ -4717,10 +4488,7 @@ test_scaleoffset_float(hid_t file)
if(H5Dclose(dataset) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
+
return 0;
error:
return -1;
@@ -4745,7 +4513,6 @@ error:
static herr_t
test_scaleoffset_float_2(hid_t file)
{
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
hid_t dataset, datatype, space, mspace, dc;
const hsize_t size[2] = {2, 5};
const hsize_t chunk_size[2] = {2,5};
@@ -4757,12 +4524,9 @@ test_scaleoffset_float_2(hid_t file)
hsize_t count[2]; /* Block count */
hsize_t block[2]; /* Block sizes */
size_t j;
-#else /* H5_HAVE_FILTER_SCALEOFFSET */
- const char *not_supported= " Scaleoffset is not enabled.";
-#endif /* H5_HAVE_FILTER_SCALEOFFSET */
TESTING(" scaleoffset float with fill value, D-scaling (setup)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
+
datatype = H5Tcopy(H5T_NATIVE_FLOAT);
/* Set order of dataset datatype */
@@ -4811,10 +4575,6 @@ test_scaleoffset_float_2(hid_t file)
}
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 1: Test scaleoffset by setting up a chunked dataset and writing
@@ -4823,15 +4583,10 @@ test_scaleoffset_float_2(hid_t file)
*/
TESTING(" scaleoffset float with fill value, D-scaling (write)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
/* only data in the hyperslab will be written, other value should be fill value */
if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, mspace, mspace, H5P_DEFAULT,
orig_data) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 2: Try to read the data we just wrote.
@@ -4839,7 +4594,6 @@ test_scaleoffset_float_2(hid_t file)
*/
TESTING(" scaleoffset float with fill value, D-scaling (read)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
/* Read the dataset back */
if(H5Dread(dataset, H5T_NATIVE_FLOAT, mspace, mspace, H5P_DEFAULT,
new_data) < 0) goto error;
@@ -4863,10 +4617,7 @@ test_scaleoffset_float_2(hid_t file)
if(H5Dclose(dataset) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
+
return 0;
error:
return -1;
@@ -4891,19 +4642,15 @@ error:
static herr_t
test_scaleoffset_double(hid_t file)
{
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
hid_t dataset, datatype, space, dc;
const hsize_t size[2] = {2, 5};
const hsize_t chunk_size[2] = {2,5};
double orig_data[2][5];
double new_data[2][5];
size_t i, j;
-#else /* H5_HAVE_FILTER_SCALEOFFSET */
- const char *not_supported= " Scaleoffset is not enabled.";
-#endif /* H5_HAVE_FILTER_SCALEOFFSET */
TESTING(" scaleoffset double without fill value, D-scaling (setup)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
+
datatype = H5Tcopy(H5T_NATIVE_DOUBLE);
/* Set order of dataset datatype */
@@ -4939,10 +4686,6 @@ test_scaleoffset_double(hid_t file)
}
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 1: Test scaleoffset by setting up a chunked dataset and writing
@@ -4951,14 +4694,9 @@ test_scaleoffset_double(hid_t file)
*/
TESTING(" scaleoffset double without fill value, D-scaling (write)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
if(H5Dwrite(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
orig_data) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 2: Try to read the data we just wrote.
@@ -4966,7 +4704,6 @@ test_scaleoffset_double(hid_t file)
*/
TESTING(" scaleoffset double without fill value, D-scaling (read)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
/* Read the dataset back */
if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
new_data) < 0) goto error;
@@ -4993,10 +4730,7 @@ test_scaleoffset_double(hid_t file)
if(H5Dclose(dataset) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
+
return 0;
error:
return -1;
@@ -5021,7 +4755,6 @@ error:
static herr_t
test_scaleoffset_double_2(hid_t file)
{
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
hid_t dataset, datatype, space, mspace, dc;
const hsize_t size[2] = {2, 5};
const hsize_t chunk_size[2] = {2,5};
@@ -5033,12 +4766,9 @@ test_scaleoffset_double_2(hid_t file)
hsize_t count[2]; /* Block count */
hsize_t block[2]; /* Block sizes */
size_t j;
-#else /* H5_HAVE_FILTER_SCALEOFFSET */
- const char *not_supported= " Scaleoffset is not enabled.";
-#endif /* H5_HAVE_FILTER_SCALEOFFSET */
TESTING(" scaleoffset double with fill value, D-scaling (setup)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
+
datatype = H5Tcopy(H5T_NATIVE_DOUBLE);
/* Set order of dataset datatype */
@@ -5087,10 +4817,6 @@ test_scaleoffset_double_2(hid_t file)
}
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 1: Test scaleoffset by setting up a chunked dataset and writing
@@ -5099,15 +4825,10 @@ test_scaleoffset_double_2(hid_t file)
*/
TESTING(" scaleoffset double with fill value, D-scaling (write)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
/* only data in the hyperslab will be written, other value should be fill value */
if(H5Dwrite(dataset, H5T_NATIVE_DOUBLE, mspace, mspace, H5P_DEFAULT,
orig_data) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
/*----------------------------------------------------------------------
* STEP 2: Try to read the data we just wrote.
@@ -5115,7 +4836,6 @@ test_scaleoffset_double_2(hid_t file)
*/
TESTING(" scaleoffset double with fill value, D-scaling (read)");
-#ifdef H5_HAVE_FILTER_SCALEOFFSET
/* Read the dataset back */
if(H5Dread(dataset, H5T_NATIVE_DOUBLE, mspace, mspace, H5P_DEFAULT,
new_data) < 0) goto error;
@@ -5140,10 +4860,7 @@ test_scaleoffset_double_2(hid_t file)
if(H5Dclose(dataset) < 0) goto error;
PASSED();
-#else
- SKIPPED();
- puts(not_supported);
-#endif
+
return 0;
error:
return -1;
@@ -6371,7 +6088,7 @@ test_filter_delete(hid_t file)
TESTING("filter deletion");
-#if defined H5_HAVE_FILTER_DEFLATE && defined H5_HAVE_FILTER_SHUFFLE && defined H5_HAVE_FILTER_FLETCHER32
+#ifdef H5_HAVE_FILTER_DEFLATE
/* create the data space */
if((sid = H5Screate_simple(2, dims, NULL)) < 0) goto error;
@@ -6576,7 +6293,6 @@ test_filters_endianess(void)
TESTING("filters with big-endian/little-endian data");
-#if defined H5_HAVE_FILTER_FLETCHER32
/*-------------------------------------------------------------------------
* step 1: open a file written on a little-endian machine
*-------------------------------------------------------------------------
@@ -6609,9 +6325,7 @@ test_filters_endianess(void)
if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
PASSED();
-#else
- SKIPPED();
-#endif
+
return 0;
error: