author     Quincey Koziol <koziol@hdfgroup.org>        2016-04-02 09:39:32 (GMT)
committer  Quincey Koziol <koziol@hdfgroup.org>        2016-04-02 09:39:32 (GMT)
commit     9d2178ab886ae957cfe11b6fe09f9e7f0e9ce369 (patch)
tree       d4256c01e3f5364ca39ec230a6a897d16f51273d /test
parent     e45885dea912a18b9fd6b1450d3ff196dcb749eb (diff)
[svn-r29607] Description:
Bring "don't filter partial edge chunks" capability from revise_chunks
to trunk.
Tested on:
MacOSX/64 10.11.4 (amazon) w/debug, production & parallel
(h5committest forthcoming)
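
The tests below exercise the new dataset-creation property through H5Pset_chunk_opts(). As a point of reference, here is a minimal, self-contained sketch (not taken from the diff; the file name "partial_edge.h5" and dataset name "dset" are placeholders, and error checking is omitted) of creating a 4x3 dataset with 2x2 chunks so that the partial edge chunks bypass the deflate filter:

    #include "hdf5.h"

    int
    main(void)
    {
        hsize_t dim[2]    = {4, 3};   /* 4x3 dataset...                          */
        hsize_t cdim[2]   = {2, 2};   /* ...with 2x2 chunks: the chunks along the
                                       * right edge are only partially filled    */
        char    buf[4][3] = {{0}};
        hid_t   fid, sid, dcpl, did;

        fid  = H5Fcreate("partial_edge.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        sid  = H5Screate_simple(2, dim, NULL);
        dcpl = H5Pcreate(H5P_DATASET_CREATE);

        H5Pset_chunk(dcpl, 2, cdim);
        H5Pset_deflate(dcpl, 9);

        /* The option under test: partial edge chunks skip the filter pipeline */
        H5Pset_chunk_opts(dcpl, H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS);

        did = H5Dcreate2(fid, "dset", H5T_NATIVE_CHAR, sid,
                         H5P_DEFAULT, dcpl, H5P_DEFAULT);
        H5Dwrite(did, H5T_NATIVE_CHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

        H5Dclose(did);
        H5Pclose(dcpl);
        H5Sclose(sid);
        H5Fclose(fid);
        return 0;
    }

With the option set, only the two full 2x2 chunks pass through the deflate filter; the two 2x1 edge chunks are stored unfiltered, which is what the new dsets.c test verifies with a byte-counting filter.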
Diffstat (limited to 'test')
-rw-r--r--  test/dsets.c       227
-rw-r--r--  test/objcopy.c     141
-rw-r--r--  test/set_extent.c   54
3 files changed, 391 insertions, 31 deletions
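
In test/dsets.c the verification relies on a pass-through "count" filter that only tallies the bytes flowing through the pipeline. A condensed sketch of that pattern follows; the filter id 312 matches the test's H5Z_FILTER_COUNT, while the names count_filter, COUNT_CLASS, bytes_read and bytes_written are illustrative stand-ins for the test's own identifiers:

    #include "hdf5.h"

    static size_t bytes_read = 0, bytes_written = 0;

    /* Pass-through filter: count bytes, leave the chunk data untouched */
    static size_t
    count_filter(unsigned int flags, size_t cd_nelmts, const unsigned int *cd_values,
                 size_t nbytes, size_t *buf_size, void **buf)
    {
        (void)cd_nelmts; (void)cd_values; (void)buf_size; (void)buf;
        if(flags & H5Z_FLAG_REVERSE)
            bytes_read += nbytes;       /* decoding: chunk is being read    */
        else
            bytes_written += nbytes;    /* encoding: chunk is being written */
        return nbytes;                  /* chunk size unchanged             */
    }

    const H5Z_class2_t COUNT_CLASS[1] = {{
        H5Z_CLASS_T_VERS,   /* H5Z_class_t version            */
        312,                /* filter id, local to the test   */
        1, 1,               /* encoder and decoder enabled    */
        "count",            /* name for debugging             */
        NULL, NULL,         /* no can_apply / set_local hooks */
        count_filter        /* the filter callback            */
    }};

After H5Zregister(COUNT_CLASS) and H5Pset_filter(dcpl, 312, 0, (size_t)0, NULL), writing the 4x3 dataset sketched above should increase bytes_written by the size of the two full chunks only, since the partial edge chunks bypass the pipeline.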
diff --git a/test/dsets.c b/test/dsets.c index 2cb51d3..b3f11b8 100644 --- a/test/dsets.c +++ b/test/dsets.c @@ -44,10 +44,13 @@ const char *FILENAME[] = { "huge_chunks", /* 7 */ "chunk_cache", /* 8 */ "big_chunk", /* 9 */ - "chunk_expand", /* 10 */ - "copy_dcpl_newfile",/* 11 */ - "layout_extend", /* 12 */ - "zero_chunk", /* 13 */ + "chunk_fast", /* 10 */ + "chunk_expand", /* 11 */ + "chunk_fixed", /* 12 */ + "copy_dcpl_newfile",/* 13 */ + "partial_chunks", /* 14 */ + "layout_extend", /* 15 */ + "zero_chunk", /* 16 */ NULL }; #define FILENAME_BUF_SIZE 1024 @@ -125,6 +128,7 @@ const char *FILENAME[] = { #define H5Z_FILTER_DEPREC 309 #define H5Z_FILTER_EXPAND 310 #define H5Z_FILTER_CAN_APPLY_TEST2 311 +#define H5Z_FILTER_COUNT 312 /* Flags for testing filters */ #define DISABLE_FLETCHER32 0 @@ -198,6 +202,8 @@ const char *FILENAME[] = { #define DSET_DIM2 200 int points[DSET_DIM1][DSET_DIM2], check[DSET_DIM1][DSET_DIM2]; double points_dbl[DSET_DIM1][DSET_DIM2], check_dbl[DSET_DIM1][DSET_DIM2]; +size_t count_nbytes_read = 0; +size_t count_nbytes_written = 0; /* Local prototypes for filter functions */ static size_t filter_bogus(unsigned int flags, size_t cd_nelmts, @@ -212,6 +218,49 @@ static size_t filter_corrupt(unsigned int flags, size_t cd_nelmts, const unsigned int *cd_values, size_t nbytes, size_t *buf_size, void **buf); static size_t filter_expand(unsigned int flags, size_t cd_nelmts, const unsigned int *cd_values, size_t nbytes, size_t *buf_size, void **buf); +static size_t filter_count(unsigned int flags, size_t cd_nelmts, + const unsigned int *cd_values, size_t nbytes, size_t *buf_size, void **buf); + +/* This message derives from H5Z */ +const H5Z_class2_t H5Z_COUNT[1] = {{ + H5Z_CLASS_T_VERS, /* H5Z_class_t version */ + H5Z_FILTER_COUNT, /* Filter id number */ + 1, 1, /* Encoding and decoding enabled */ + "count", /* Filter name for debugging */ + NULL, /* The "can apply" callback */ + NULL, /* The "set local" callback */ + filter_count, /* The actual filter function */ +}}; + + +/*------------------------------------------------------------------------- + * Function: filter_count + * + * Purpose: This filter counts the number of bytes read and written, + * incrementing count_nbytes_read or count_nbytes_written as + * appropriate. 
+ * + * Return: Success: Data chunk size + * + * Failure: 0 + * + * Programmer: Neil Fortner + * Wednesday, March 17, 2010 + * + *------------------------------------------------------------------------- + */ +static size_t +filter_count(unsigned int flags, size_t H5_ATTR_UNUSED cd_nelmts, + const unsigned int H5_ATTR_UNUSED *cd_values, size_t nbytes, + size_t H5_ATTR_UNUSED *buf_size, void H5_ATTR_UNUSED **buf) +{ + if(flags & H5Z_FLAG_REVERSE) + count_nbytes_read += nbytes; + else + count_nbytes_written += nbytes; + + return nbytes; +} /*------------------------------------------------------------------------- @@ -905,7 +954,7 @@ test_layout_extend(hid_t fapl) TESTING("extendible dataset with various layout"); /* Create a file */ - h5_fixname(FILENAME[12], fapl, filename, sizeof filename); + h5_fixname(FILENAME[15], fapl, filename, sizeof filename); if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) FAIL_STACK_ERROR @@ -2798,7 +2847,7 @@ test_nbit_int(hid_t file) mask = ~((unsigned)~0 << (precision + offset)) & ((unsigned)~0 << offset); for(i=0; i<(size_t)size[0]; i++) { for(j=0; j<(size_t)size[1]; j++) { - if((new_data[i][j] & mask) != (orig_data[i][j] & mask)) { + if(((unsigned)new_data[i][j] & mask) != ((unsigned)orig_data[i][j] & mask)) { H5_FAILED(); printf(" Read different values than written.\n"); printf(" At index %lu,%lu\n", (unsigned long)i, (unsigned long)j); @@ -3337,9 +3386,9 @@ test_nbit_compound(hid_t file) s_mask = ~((unsigned)~0 << (precision[2] + offset[2])) & ((unsigned)~0 << offset[2]); for(i=0; i<size[0]; i++) { for(j=0; j<size[1]; j++) { - if((new_data[i][j].i & i_mask) != (orig_data[i][j].i & i_mask) || - (new_data[i][j].c & c_mask) != (orig_data[i][j].c & c_mask) || - (new_data[i][j].s & s_mask) != (orig_data[i][j].s & s_mask) || + if(((unsigned)new_data[i][j].i & i_mask) != ((unsigned)orig_data[i][j].i & i_mask) || + ((unsigned)new_data[i][j].c & c_mask) != ((unsigned)orig_data[i][j].c & c_mask) || + ((unsigned)new_data[i][j].s & s_mask) != ((unsigned)orig_data[i][j].s & s_mask) || (orig_data[i][j].f==orig_data[i][j].f && new_data[i][j].f != orig_data[i][j].f)) { H5_FAILED(); @@ -3595,16 +3644,16 @@ test_nbit_compound_2(hid_t file) for(m = 0; m < (size_t)array_dims[0]; m++) for(n = 0; n < (size_t)array_dims[1]; n++) - if((new_data[i][j].b[m][n]&b_mask)!=(orig_data[i][j].b[m][n]&b_mask)) { + if(((unsigned)new_data[i][j].b[m][n] & b_mask)!=((unsigned)orig_data[i][j].b[m][n] & b_mask)) { b_failed = 1; goto out; } for(m = 0; m < (size_t)array_dims[0]; m++) for(n = 0; n < (size_t)array_dims[1]; n++) - if((new_data[i][j].d[m][n].i & i_mask)!=(orig_data[i][j].d[m][n].i & i_mask)|| - (new_data[i][j].d[m][n].c & c_mask)!=(orig_data[i][j].d[m][n].c & c_mask)|| - (new_data[i][j].d[m][n].s & s_mask)!=(orig_data[i][j].d[m][n].s & s_mask)|| + if(((unsigned)new_data[i][j].d[m][n].i & i_mask) != ((unsigned)orig_data[i][j].d[m][n].i & i_mask)|| + ((unsigned)new_data[i][j].d[m][n].c & c_mask) != ((unsigned)orig_data[i][j].d[m][n].c & c_mask)|| + ((unsigned)new_data[i][j].d[m][n].s & s_mask) != ((unsigned)orig_data[i][j].d[m][n].s & s_mask)|| (new_data[i][j].d[m][n].f==new_data[i][j].d[m][n].f && new_data[i][j].d[m][n].f != new_data[i][j].d[m][n].f)) { d_failed = 1; @@ -3612,9 +3661,9 @@ test_nbit_compound_2(hid_t file) } out: - if((new_data[i][j].a.i & i_mask)!=(orig_data[i][j].a.i & i_mask)|| - (new_data[i][j].a.c & c_mask)!=(orig_data[i][j].a.c & c_mask)|| - (new_data[i][j].a.s & s_mask)!=(orig_data[i][j].a.s & s_mask)|| + if(((unsigned)new_data[i][j].a.i & 
i_mask) != ((unsigned)orig_data[i][j].a.i & i_mask)|| + ((unsigned)new_data[i][j].a.c & c_mask) != ((unsigned)orig_data[i][j].a.c & c_mask)|| + ((unsigned)new_data[i][j].a.s & s_mask) != ((unsigned)orig_data[i][j].a.s & s_mask)|| (new_data[i][j].a.f==new_data[i][j].a.f && new_data[i][j].a.f != new_data[i][j].a.f)|| new_data[i][j].v != orig_data[i][j].v || b_failed || d_failed) { @@ -6067,7 +6116,7 @@ test_copy_dcpl(hid_t file, hid_t fapl) /* Create a second file and create 2 datasets with the copies of the DCPLs in the first * file. Test whether the copies of DCPLs work. */ - h5_fixname(FILENAME[11], fapl, filename, sizeof filename); + h5_fixname(FILENAME[13], fapl, filename, sizeof filename); if((new_file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR @@ -7849,6 +7898,147 @@ error: /*------------------------------------------------------------------------- + * + * test_unfiltered_edge_chunks(): + * Tests that partial edge chunks aren't filtered when the + * H5D_CHUNK_FILTER_PARTIAL_CHUNKS option is set. + * + * Programmer: Neil Fortner; 17th March, 2010 + * + *------------------------------------------------------------------------- + */ +static herr_t +test_unfiltered_edge_chunks(hid_t fapl) +{ + hid_t fid = -1; /* File id */ + hid_t did = -1; /* Dataset id */ + hid_t sid = -1; /* Dataspace id */ + hid_t dcpl = -1; /* DCPL id */ + hsize_t dim[2] = {4, 3}; /* Dataset dimensions */ + hsize_t cdim[2] = {2, 2}; /* Chunk dimension */ + char wbuf[4][3]; /* Write buffer */ + char rbuf[4][3]; /* Read buffer */ + char filename[FILENAME_BUF_SIZE] = ""; /* old test file name */ + unsigned opts; /* Chunk options */ + unsigned i, j; /* Local index variables */ + + /* Output message about test being performed */ + TESTING("disabled partial chunk filters"); + + h5_fixname(FILENAME[14], fapl, filename, sizeof filename); + + /* Create the file */ + if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) + TEST_ERROR + + /* Register byte-counting filter */ + if(H5Zregister(H5Z_COUNT) < 0) + TEST_ERROR + + /* Create dataspace */ + if((sid = H5Screate_simple(2, dim, NULL)) < 0) + TEST_ERROR + + /* Create DCPL */ + if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) + TEST_ERROR + + /* Set chunk dimensions */ + if(H5Pset_chunk(dcpl, 2, cdim) < 0) + TEST_ERROR + + /* Add "count" filter */ + if(H5Pset_filter(dcpl, H5Z_FILTER_COUNT, 0u, (size_t)0, NULL) < 0) + TEST_ERROR + + /* Disable filters on partial chunks */ + if(H5Pget_chunk_opts(dcpl, &opts) < 0) + TEST_ERROR + opts |= H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS; + if(H5Pset_chunk_opts(dcpl, opts) < 0) + TEST_ERROR + + /* Initialize write buffer */ + for(i=0; i<dim[0]; i++) + for(j=0; j<dim[1]; j++) + wbuf[i][j] = (char)(2 * i) - (char)j; + + /* Reset byte counts */ + count_nbytes_read = (size_t)0; + count_nbytes_written = (size_t)0; + + /* Create dataset */ + if((did = H5Dcreate2(fid, DSET_CHUNKED_NAME, H5T_NATIVE_CHAR, sid, + H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + TEST_ERROR + + /* Nothing should have been written, as we are not using early allocation */ + if(count_nbytes_read != (size_t)0) + TEST_ERROR + if(count_nbytes_written != (size_t)0) + TEST_ERROR + + /* Write data */ + if(H5Dwrite(did, H5T_NATIVE_CHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf) < 0) + TEST_ERROR + + /* Close dataset */ + if(H5Dclose(did) < 0) + TEST_ERROR + + /* Make sure only 2 of the 4 chunks were written through the filter (4 bytes + * each) */ + if(count_nbytes_read != (size_t)0) + TEST_ERROR + if(count_nbytes_written != (size_t)(2 * cdim[0] 
* cdim[1])) + TEST_ERROR + + /* Reopen the dataset */ + if((did = H5Dopen2(fid, DSET_CHUNKED_NAME, H5P_DEFAULT)) < 0) + TEST_ERROR + + /* Read the dataset */ + if(H5Dread(did, H5T_NATIVE_CHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0) + TEST_ERROR + + /* Verify that data read == data written */ + for(i=0; i<dim[0]; i++) + for(j=0; j<dim[1]; j++) + if(rbuf[i][j] != wbuf[i][j]) + TEST_ERROR + + /* Make sure only 2 of the 4 chunks were read through the filter (4 bytes + * each) */ + if(count_nbytes_read != (size_t)(2 * cdim[0] * cdim[1])) + TEST_ERROR + if(count_nbytes_written != (size_t)(2 * cdim[0] * cdim[1])) + TEST_ERROR + + /* Close IDs */ + if(H5Dclose(did) < 0) + TEST_ERROR + if(H5Pclose(dcpl) < 0) + TEST_ERROR + if(H5Sclose(sid) < 0) + TEST_ERROR + if(H5Fclose(fid) < 0) + TEST_ERROR + + PASSED(); + return 0; + +error: + H5E_BEGIN_TRY { + H5Dclose(did); + H5Pclose(dcpl); + H5Sclose(sid); + H5Fclose(fid); + } H5E_END_TRY; + return -1; +} /* test_unfiltered_edge_chunks */ + + +/*------------------------------------------------------------------------- * Function: test_large_chunk_shrink * * Purpose: Tests support for shrinking a chunk larger than 1 MB by a @@ -7990,7 +8180,7 @@ test_zero_dim_dset(hid_t fapl) TESTING("shrinking large chunk"); - h5_fixname(FILENAME[13], fapl, filename, sizeof filename); + h5_fixname(FILENAME[16], fapl, filename, sizeof filename); /* Create file */ if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) FAIL_STACK_ERROR @@ -9185,6 +9375,7 @@ main(void) nerrors += (test_big_chunks_bypass_cache(my_fapl) < 0 ? 1 : 0); nerrors += (test_chunk_expand(my_fapl) < 0 ? 1 : 0); nerrors += (test_layout_extend(my_fapl) < 0 ? 1 : 0); + nerrors += (test_unfiltered_edge_chunks(my_fapl) < 0 ? 1 : 0); nerrors += (test_large_chunk_shrink(my_fapl) < 0 ? 1 : 0); nerrors += (test_zero_dim_dset(my_fapl) < 0 ? 
1 : 0); diff --git a/test/objcopy.c b/test/objcopy.c index 9bdb50d..a9791d5 100644 --- a/test/objcopy.c +++ b/test/objcopy.c @@ -3084,6 +3084,146 @@ error: /*------------------------------------------------------------------------- + * Function: test_copy_dataset_no_edge_filt + * + * Purpose: Create a compressed, chunked dataset in SRC file and copy it to DST file + * + * Return: Success: 0 + * Failure: number of errors + * + * Programmer: Neil Fortner + * Tuesday, May 11, 2010 + * Mostly copied from test_copy_dataset_compressed, by + * Quincey Koziol + * + *------------------------------------------------------------------------- + */ +static int +test_copy_dataset_no_edge_filt(hid_t fcpl_src, hid_t fcpl_dst, hid_t src_fapl, + hid_t dst_fapl) +{ +#ifdef H5_HAVE_FILTER_DEFLATE + hid_t fid_src = -1, fid_dst = -1; /* File IDs */ + hid_t sid = -1; /* Dataspace ID */ + hid_t pid = -1; /* Dataset creation property list ID */ + hid_t did = -1, did2 = -1; /* Dataset IDs */ + hsize_t dim2d[2]; /* Dataset dimensions */ + hsize_t chunk_dim2d[2] ={CHUNK_SIZE_1, CHUNK_SIZE_2}; /* Chunk dimensions */ + float buf[DIM_SIZE_1][DIM_SIZE_2]; /* Buffer for writing data */ + int i, j; /* Local index variables */ + char src_filename[NAME_BUF_SIZE]; + char dst_filename[NAME_BUF_SIZE]; +#endif /* H5_HAVE_FILTER_DEFLATE */ + + TESTING("H5Ocopy(): compressed dataset with no edge filters"); + +#ifndef H5_HAVE_FILTER_DEFLATE + SKIPPED(); + puts(" Deflation filter not available"); +#else /* H5_HAVE_FILTER_DEFLATE */ + /* set initial data values */ + for (i=0; i<DIM_SIZE_1; i++) + for (j=0; j<DIM_SIZE_2; j++) + buf[i][j] = 100.0F; /* Something easy to compress */ + + /* Initialize the filenames */ + h5_fixname(FILENAME[0], src_fapl, src_filename, sizeof src_filename); + h5_fixname(FILENAME[1], dst_fapl, dst_filename, sizeof dst_filename); + + /* Reset file address checking info */ + addr_reset(); + + /* create source file */ + if((fid_src = H5Fcreate(src_filename, H5F_ACC_TRUNC, fcpl_src, src_fapl)) < 0) TEST_ERROR + + /* Set dataspace dimensions */ + dim2d[0]=DIM_SIZE_1; + dim2d[1]=DIM_SIZE_2; + + /* create dataspace */ + if((sid = H5Screate_simple(2, dim2d, NULL)) < 0) TEST_ERROR + + /* create and set comp & chunk plist, and disable partial chunk filters */ + if((pid = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR + if(H5Pset_chunk(pid, 2, chunk_dim2d) < 0) TEST_ERROR + if(H5Pset_deflate(pid, 9) < 0) TEST_ERROR + if(H5Pset_chunk_opts(pid, H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS) < 0) TEST_ERROR + + /* create dataset */ + if((did = H5Dcreate2(fid_src, NAME_DATASET_CHUNKED, H5T_NATIVE_FLOAT, sid, H5P_DEFAULT, pid, H5P_DEFAULT)) < 0) TEST_ERROR + + /* close chunk plist */ + if(H5Pclose(pid) < 0) TEST_ERROR + + /* write data into file */ + if(H5Dwrite(did, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) TEST_ERROR + + /* close dataspace */ + if(H5Sclose(sid) < 0) TEST_ERROR + + /* attach attributes to the dataset */ + if(test_copy_attach_attributes(did, H5T_NATIVE_INT) < 0) TEST_ERROR + + /* close the dataset */ + if(H5Dclose(did) < 0) TEST_ERROR + + /* close the SRC file */ + if(H5Fclose(fid_src) < 0) TEST_ERROR + + + /* open the source file with read-only */ + if((fid_src = H5Fopen(src_filename, H5F_ACC_RDONLY, src_fapl)) < 0) TEST_ERROR + + /* create destination file */ + if((fid_dst = H5Fcreate(dst_filename, H5F_ACC_TRUNC, fcpl_dst, dst_fapl)) < 0) TEST_ERROR + + /* Create an uncopied object in destination file so that addresses in source and destination files aren't the same */ + 
if(H5Gclose(H5Gcreate2(fid_dst, NAME_GROUP_UNCOPIED, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR + + /* copy the dataset from SRC to DST */ + if(H5Ocopy(fid_src, NAME_DATASET_CHUNKED, fid_dst, NAME_DATASET_CHUNKED, H5P_DEFAULT, H5P_DEFAULT) < 0) TEST_ERROR + + /* open the dataset for copy */ + if((did = H5Dopen2(fid_src, NAME_DATASET_CHUNKED, H5P_DEFAULT)) < 0) TEST_ERROR + + /* open the destination dataset */ + if((did2 = H5Dopen2(fid_dst, NAME_DATASET_CHUNKED, H5P_DEFAULT)) < 0) TEST_ERROR + + /* Check if the datasets are equal */ + if(compare_datasets(did, did2, H5P_DEFAULT, NULL) != TRUE) TEST_ERROR + + /* close the destination dataset */ + if(H5Dclose(did2) < 0) TEST_ERROR + + /* close the source dataset */ + if(H5Dclose(did) < 0) TEST_ERROR + + /* close the SRC file */ + if(H5Fclose(fid_src) < 0) TEST_ERROR + + /* close the DST file */ + if(H5Fclose(fid_dst) < 0) TEST_ERROR + + PASSED(); +#endif /* H5_HAVE_FILTER_DEFLATE */ + return 0; + +#ifdef H5_HAVE_FILTER_DEFLATE +error: + H5E_BEGIN_TRY { + H5Dclose(did2); + H5Dclose(did); + H5Pclose(pid); + H5Sclose(sid); + H5Fclose(fid_dst); + H5Fclose(fid_src); + } H5E_END_TRY; + return 1; +#endif /* H5_HAVE_FILTER_DEFLATE */ +} /* end test_copy_dataset_no_edge_filt */ + + +/*------------------------------------------------------------------------- * Function: test_copy_dataset_compact * * Purpose: Create a compact dataset in SRC file and copy it to DST file @@ -12376,6 +12516,7 @@ main(void) nerrors += test_copy_dataset_chunked_empty(fcpl_src, fcpl_dst, src_fapl, dst_fapl); nerrors += test_copy_dataset_chunked_sparse(fcpl_src, fcpl_dst, src_fapl, dst_fapl); nerrors += test_copy_dataset_compressed(fcpl_src, fcpl_dst, src_fapl, dst_fapl); + nerrors += test_copy_dataset_no_edge_filt(fcpl_src, fcpl_dst, src_fapl, dst_fapl); nerrors += test_copy_dataset_compact(fcpl_src, fcpl_dst, src_fapl, dst_fapl); nerrors += test_copy_dataset_multi_ohdr_chunks(fcpl_src, fcpl_dst, src_fapl, dst_fapl); nerrors += test_copy_dataset_attr_named_dtype(fcpl_src, fcpl_dst, src_fapl, dst_fapl); diff --git a/test/set_extent.c b/test/set_extent.c index acfdc5b..7fe8d75 100644 --- a/test/set_extent.c +++ b/test/set_extent.c @@ -46,8 +46,9 @@ const char *FILENAME[] = { #define CONFIG_COMPRESS 0x01u #define CONFIG_FILL 0x02u #define CONFIG_EARLY_ALLOC 0x04u +#define CONFIG_UNFILT_EDGE 0x08u #define CONFIG_ALL (CONFIG_COMPRESS + CONFIG_FILL \ - + CONFIG_EARLY_ALLOC) + + CONFIG_EARLY_ALLOC + CONFIG_UNFILT_EDGE) #define FILL_VALUE -1 #define DO_RANKS_PRINT_CONFIG(TEST) { \ printf(" Config:\n"); \ @@ -56,6 +57,8 @@ const char *FILENAME[] = { printf(" Fill value: %s\n", (do_fillvalue ? "yes" : "no")); \ printf(" Early allocation: %s\n", (config & CONFIG_EARLY_ALLOC ? "yes" \ : "no")); \ + printf(" Edge chunk filters: %s\n", (config & CONFIG_UNFILT_EDGE \ + ? 
"disabled" : "enabled")); \ } /* end DO_RANKS_PRINT_CONFIG */ #define RANK1 1 @@ -85,18 +88,22 @@ static int do_layouts( hid_t fapl ); static int test_rank1( hid_t fapl, hid_t dcpl, hbool_t do_fill_value, + hbool_t disable_edge_filters, hbool_t set_istore_k); static int test_rank2( hid_t fapl, hid_t dcpl, hbool_t do_fill_value, + hbool_t disable_edge_filters, hbool_t set_istore_k); static int test_rank3( hid_t fapl, hid_t dcpl, hbool_t do_fill_value, + hbool_t disable_edge_filters, hbool_t set_istore_k); static int test_random_rank4( hid_t fapl, hid_t dcpl, hbool_t do_fillvalue, + hbool_t disable_edge_filters, hbool_t do_sparse); static int test_external( hid_t fapl ); @@ -211,7 +218,8 @@ error: static int do_ranks( hid_t fapl ) { - hbool_t do_fillvalue = 0; + hbool_t do_fillvalue = FALSE; + hbool_t disable_edge_filters = FALSE; hid_t dcpl = -1; int fillvalue = FILL_VALUE; unsigned config; @@ -247,6 +255,11 @@ static int do_ranks( hid_t fapl ) if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0) TEST_ERROR + if(config & CONFIG_UNFILT_EDGE) + disable_edge_filters = TRUE; + else + disable_edge_filters = FALSE; + /* Run tests */ if(do_fillvalue) { unsigned ifset; @@ -261,25 +274,25 @@ static int do_ranks( hid_t fapl ) if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC) < 0) TEST_ERROR - if(test_rank1(fapl, dcpl, do_fillvalue, FALSE) < 0) { + if(test_rank1(fapl, dcpl, do_fillvalue, disable_edge_filters, FALSE) < 0) { DO_RANKS_PRINT_CONFIG("Rank 1") printf(" Fill time: %s\n", (ifset ? "H5D_FILL_TIME_IFSET" : "H5D_FILL_TIME_ALLOC")); goto error; } /* end if */ - if(test_rank2(fapl, dcpl, do_fillvalue, FALSE) < 0) { + if(test_rank2(fapl, dcpl, do_fillvalue, disable_edge_filters, FALSE) < 0) { DO_RANKS_PRINT_CONFIG("Rank 2") printf(" Fill time: %s\n", (ifset ? "H5D_FILL_TIME_IFSET" : "H5D_FILL_TIME_ALLOC")); goto error; } /* end if */ - if(test_rank3(fapl, dcpl, do_fillvalue, FALSE) < 0) { + if(test_rank3(fapl, dcpl, do_fillvalue, disable_edge_filters, FALSE) < 0) { DO_RANKS_PRINT_CONFIG("Rank 3") printf(" Fill time: %s\n", (ifset ? "H5D_FILL_TIME_IFSET" : "H5D_FILL_TIME_ALLOC")); goto error; } /* end if */ - if(test_rank2(fapl, dcpl, do_fillvalue, TRUE) < 0) { + if(test_rank2(fapl, dcpl, do_fillvalue, disable_edge_filters, TRUE) < 0) { DO_RANKS_PRINT_CONFIG("Rank 2 with non-default indexed storage B-tree") printf(" Fill time: %s\n", (ifset ? 
"H5D_FILL_TIME_IFSET" : "H5D_FILL_TIME_ALLOC")); @@ -293,19 +306,19 @@ static int do_ranks( hid_t fapl ) if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC) < 0) TEST_ERROR - if(test_rank1(fapl, dcpl, do_fillvalue, FALSE) < 0) { + if(test_rank1(fapl, dcpl, do_fillvalue, disable_edge_filters, FALSE) < 0) { DO_RANKS_PRINT_CONFIG("Rank 1") goto error; } /* end if */ - if(test_rank2(fapl, dcpl, do_fillvalue, FALSE) < 0) { + if(test_rank2(fapl, dcpl, do_fillvalue, disable_edge_filters, FALSE) < 0) { DO_RANKS_PRINT_CONFIG("Rank 2") goto error; } /* end if */ - if(test_rank3(fapl, dcpl, do_fillvalue, FALSE) < 0) { + if(test_rank3(fapl, dcpl, do_fillvalue, disable_edge_filters, FALSE) < 0) { DO_RANKS_PRINT_CONFIG("Rank 3") goto error; } /* end if */ - if(test_rank2(fapl, dcpl, do_fillvalue, TRUE) < 0) { + if(test_rank2(fapl, dcpl, do_fillvalue, disable_edge_filters, TRUE) < 0) { DO_RANKS_PRINT_CONFIG("Rank 2 with non-default indexed storage B-tree") goto error; } /* end if */ @@ -316,13 +329,13 @@ static int do_ranks( hid_t fapl ) if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_IFSET) < 0) TEST_ERROR - if(test_random_rank4(fapl, dcpl, do_fillvalue, FALSE) < 0) { + if(test_random_rank4(fapl, dcpl, do_fillvalue, disable_edge_filters, FALSE) < 0) { DO_RANKS_PRINT_CONFIG("Randomized rank 4") goto error; } /* end if */ if(!(config & CONFIG_EARLY_ALLOC)) - if(test_random_rank4(fapl, dcpl, do_fillvalue, TRUE) < 0) { + if(test_random_rank4(fapl, dcpl, do_fillvalue, disable_edge_filters, TRUE) < 0) { DO_RANKS_PRINT_CONFIG("Randomized rank 4 with sparse allocation") goto error; } /* end if */ @@ -376,6 +389,7 @@ error: static int test_rank1( hid_t fapl, hid_t dcpl, hbool_t do_fill_value, + hbool_t disable_edge_filters, hbool_t set_istore_k) { @@ -433,6 +447,9 @@ static int test_rank1( hid_t fapl, TEST_ERROR if(H5Pset_chunk(my_dcpl, RANK1, dims_c) < 0) TEST_ERROR + if(disable_edge_filters) + if(H5Pset_chunk_opts(my_dcpl, H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS) < 0) + TEST_ERROR /*------------------------------------------------------------------------- * create, write dataset @@ -713,6 +730,7 @@ error: static int test_rank2( hid_t fapl, hid_t dcpl, hbool_t do_fill_value, + hbool_t disable_edge_filters, hbool_t set_istore_k) { @@ -793,6 +811,9 @@ static int test_rank2( hid_t fapl, { TEST_ERROR } + if(disable_edge_filters) + if(H5Pset_chunk_opts(my_dcpl, H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS) < 0) + TEST_ERROR /*------------------------------------------------------------------------- * Procedure 1 @@ -1328,6 +1349,7 @@ error: static int test_rank3( hid_t fapl, hid_t dcpl, hbool_t do_fill_value, + hbool_t disable_edge_filters, hbool_t set_istore_k) { @@ -1414,6 +1436,9 @@ static int test_rank3( hid_t fapl, { TEST_ERROR } + if(disable_edge_filters) + if(H5Pset_chunk_opts(my_dcpl, H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS) < 0) + TEST_ERROR /*------------------------------------------------------------------------- * create, write array @@ -2488,7 +2513,7 @@ error: *------------------------------------------------------------------------- */ static int test_random_rank4( hid_t fapl, hid_t dcpl, hbool_t do_fillvalue, - hbool_t do_sparse ) + hbool_t disable_edge_filters, hbool_t do_sparse ) { hid_t file = -1; hid_t dset = -1; @@ -2532,6 +2557,9 @@ static int test_random_rank4( hid_t fapl, hid_t dcpl, hbool_t do_fillvalue, TEST_ERROR if(H5Pset_chunk(my_dcpl, 4, cdims) < 0) TEST_ERROR + if(disable_edge_filters) + if(H5Pset_chunk_opts(my_dcpl, H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS) < 0) + TEST_ERROR if((dset = H5Dcreate2(file, "dset", 
H5T_NATIVE_INT, fspace, H5P_DEFAULT, my_dcpl, H5P_DEFAULT)) < 0) TEST_ERROR
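
The objcopy.c addition, test_copy_dataset_no_edge_filt(), creates a compressed, chunked source dataset with the option set and checks that H5Ocopy() produces an equal dataset in the destination file. A minimal standalone sketch of that copy step, with placeholder file and dataset names and error checking omitted:

    #include "hdf5.h"

    /* Copy "chunked_dset" from src.h5 to dst.h5; the test's compare step
     * checks that the copied dataset matches the original, creation
     * properties included. */
    int
    main(void)
    {
        hid_t src = H5Fopen("src.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
        hid_t dst = H5Fcreate("dst.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

        H5Ocopy(src, "chunked_dset", dst, "chunked_dset", H5P_DEFAULT, H5P_DEFAULT);

        H5Fclose(dst);
        H5Fclose(src);
        return 0;
    }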