Diffstat (limited to 'test')
-rw-r--r--   test/Makefile.am |   2
-rw-r--r--   test/dsets.c     | 248
-rw-r--r--   test/objcopy.c   |  22
3 files changed, 258 insertions, 14 deletions
diff --git a/test/Makefile.am b/test/Makefile.am
index a99f11d..7e3a385 100644
--- a/test/Makefile.am
+++ b/test/Makefile.am
@@ -128,7 +128,7 @@ CHECK_CLEANFILES+=accum.h5 cmpd_dset.h5 compact_dataset.h5 dataset.h5 dset_offse
     max_compact_dataset.h5 simple.h5 set_local.h5 random_chunks.h5 \
     huge_chunks.h5 chunk_cache.h5 big_chunk.h5 chunk_fast.h5 chunk_expand.h5 \
     chunk_fixed.h5 copy_dcpl_newfile.h5 partial_chunks.h5 layout_extend.h5 \
-    zero_chunk.h5 dls_01_strings.h5 storage_size.h5 \
+    zero_chunk.h5 chunk_single.h5 storage_size.h5 dls_01_strings.h5 \
     extend.h5 istore.h5 extlinks*.h5 frspace.h5 links*.h5 \
     sys_file1 tfile[1-7].h5 th5s[1-4].h5 lheap.h5 fheap.h5 ohdr.h5 \
     stab.h5 extern_[1-5].h5 extern_[1-4][rw].raw gheap[0-4].h5 \
diff --git a/test/dsets.c b/test/dsets.c
index bd683d1..e095875 100644
--- a/test/dsets.c
+++ b/test/dsets.c
@@ -51,8 +51,9 @@ const char *FILENAME[] = {
     "partial_chunks",   /* 14 */
     "layout_extend",    /* 15 */
     "zero_chunk",       /* 16 */
-    "dls_01_strings",   /* 17 */
+    "chunk_single",     /* 17 */
     "storage_size",     /* 18 */
+    "dls_01_strings",   /* 19 */
     NULL
 };
 #define FILENAME_BUF_SIZE 1024
@@ -126,6 +127,10 @@ const char *FILENAME[] = {
 #define POINTS          72
 #define POINTS_BIG      2500
 
+/* Dataset names for testing Implicit Indexing */
+#define DSET_SINGLE_MAX         "DSET_SINGLE_MAX"
+#define DSET_SINGLE_NOMAX       "DSET_SINGLE_NOMAX"
+
 #define USER_BLOCK      1024
 
 #define SIXTY_FOUR_KB   65536
@@ -9462,6 +9467,244 @@ error:
 } /* end test_fixed_array() */
 
+/*-------------------------------------------------------------------------
+ * Function: test_single_chunk
+ *
+ * Purpose:  Tests support for Single Chunk indexing type
+ *
+ *   Create the following 2 datasets:
+ *     1) chunked dataset with NULL max dims and cur_dims = chunk_dims
+ *     2) chunked dataset with cur_dims = max_dims = chunk_dims
+ *
+ *   Repeat the following test with/without compression filter
+ *   Repeat the following test with H5D_ALLOC_TIME_EARLY/H5D_ALLOC_TIME_LATE/H5D_ALLOC_TIME_INCR
+ *     For the old format,
+ *       verify that v1 btree indexing type is used for
+ *       all datasets with all settings
+ *     For the new format:
+ *       Verify that Single Chunk indexing type is used for
+ *       all datasets with all settings
+ *
+ * Return:   Success: 0
+ *           Failure: -1
+ *
+ * Programmer:  Vailin Choi; July 2011
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+test_single_chunk(hid_t fapl)
+{
+    char        filename[FILENAME_BUF_SIZE];    /* File name */
+    hid_t       fid = -1;           /* File ID */
+    hid_t       dcpl = -1;          /* Dataset creation property list ID */
+    hid_t       t_dcpl = -1;        /* Dataset creation property list ID */
+
+    hid_t       sid = -1, sid_max = -1;     /* Dataspace ID for dataset with fixed dimensions */
+    hid_t       did = -1, did_max = -1;     /* Dataset ID for dataset with fixed dimensions */
+    hsize_t     dim2[2] = {DSET_DIM1, DSET_DIM2};   /* Dataset dimensions */
+    hsize_t     t_dim2[2] = {50, 100};      /* Dataset dimensions */
+    int         wbuf[DSET_DIM1*DSET_DIM2];  /* write buffer */
+    int         t_wbuf[50*100];             /* write buffer */
+    int         rbuf[DSET_DIM1*DSET_DIM2];  /* read buffer */
+    int         t_rbuf[50*100];             /* read buffer */
+
+    H5D_chunk_index_t idx_type;     /* Dataset chunk index type */
+    H5F_libver_t low, high;         /* File format bounds */
+    H5D_alloc_time_t alloc_time;    /* Storage allocation time */
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+    unsigned    compress;           /* Whether chunks should be compressed */
+#endif /* H5_HAVE_FILTER_DEFLATE */
+
+    size_t      n, i;               /* local index variables */
+    herr_t      ret;                /* Generic return value */
+    h5_stat_size_t empty_size;      /* Size of an empty file */
+    h5_stat_size_t file_size;       /* Size of each file created */
+
+    TESTING("datasets w/Single Chunk indexing");
+
+    h5_fixname(FILENAME[17], fapl, filename, sizeof filename);
+
+    /* Check if we are using the latest version of the format */
+    if(H5Pget_libver_bounds(fapl, &low, &high) < 0) FAIL_STACK_ERROR
+
+    /* Create and close the file to get the file size */
+    if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
+        STACK_ERROR
+    if(H5Fclose(fid) < 0)
+        STACK_ERROR
+
+    /* Get the size of the empty file */
+    if((empty_size = h5_get_file_size(filename, fapl)) < 0)
+        TEST_ERROR
+
+    for(i = n = 0; i < (DSET_DIM1 * DSET_DIM2); i++)
+        wbuf[i] = n++;
+
+    for(i = n = 0; i < (50* 100); i++)
+        t_wbuf[i] = n++;
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+    /* Loop over compressing chunks */
+    for(compress = FALSE; compress <= TRUE; compress++) {
+#endif /* H5_HAVE_FILTER_DEFLATE */
+
+        /* Loop over storage allocation time */
+        for(alloc_time = H5D_ALLOC_TIME_EARLY; alloc_time <= H5D_ALLOC_TIME_INCR; alloc_time++) {
+            /* Create file */
+            if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) FAIL_STACK_ERROR
+
+            /* Create dataset creation property list */
+            if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
+            if((t_dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
+
+            /* Set chunking */
+            if((ret = H5Pset_chunk(dcpl, 2, dim2)) < 0)
+                FAIL_PUTS_ERROR("    Problem with setting chunk.")
+
+            if((ret = H5Pset_chunk(t_dcpl, 2, t_dim2)) < 0)
+                FAIL_PUTS_ERROR("    Problem with setting chunk.")
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+            /* Check if we should compress the chunks */
+            if(compress) {
+                if(H5Pset_deflate(dcpl, 9) < 0) FAIL_STACK_ERROR
+                if(H5Pset_deflate(t_dcpl, 9) < 0) FAIL_STACK_ERROR
+            }
+#endif /* H5_HAVE_FILTER_DEFLATE */
+
+            /* Set fill time */
+            if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC) < 0) FAIL_STACK_ERROR
+            if(H5Pset_fill_time(t_dcpl, H5D_FILL_TIME_ALLOC) < 0) FAIL_STACK_ERROR
+
+            /* Set allocation time */
+            if(H5Pset_alloc_time(dcpl, alloc_time) < 0) FAIL_STACK_ERROR
+            if(H5Pset_alloc_time(t_dcpl, alloc_time) < 0) FAIL_STACK_ERROR
+
+            /* Create first dataset with cur and max dimensions */
+            if((sid_max = H5Screate_simple(2, dim2, dim2)) < 0) FAIL_STACK_ERROR
+            did_max = H5Dcreate2(fid, DSET_SINGLE_MAX, H5T_NATIVE_INT, sid_max, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+            if(did_max < 0)
+                FAIL_PUTS_ERROR("    Creating Chunked Dataset with maximum dimensions.")
+
+            /* Get the chunk index type */
+            if(H5D__layout_idx_type_test(did_max, &idx_type) < 0) FAIL_STACK_ERROR
+
+            /* Chunk index type depends on whether we are using the latest version of the format */
+            if(low == H5F_LIBVER_LATEST) {
+                if(idx_type != H5D_CHUNK_IDX_SINGLE)
+                    FAIL_PUTS_ERROR("should be using Single Chunk indexing");
+            } /* end if */
+            else {
+                if(idx_type != H5D_CHUNK_IDX_BTREE)
+                    FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+            } /* end else */
+
+            /* Write into dataset */
+            if(H5Dwrite(did_max, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf) < 0) TEST_ERROR;
+
+            /* Closing */
+            if(H5Dclose(did_max) < 0) FAIL_STACK_ERROR
+            if(H5Sclose(sid_max) < 0) FAIL_STACK_ERROR
+
+            /* Create second dataset with curr dim but NULL max dim */
+            if((sid = H5Screate_simple(2, t_dim2, NULL)) < 0) FAIL_STACK_ERROR
+            did = H5Dcreate2(fid, DSET_SINGLE_NOMAX, H5T_NATIVE_INT, sid, H5P_DEFAULT, t_dcpl, H5P_DEFAULT);
+            if(did < 0)
+                FAIL_PUTS_ERROR("    Creating Chunked Dataset.")
+
+            /* Get the chunk index type */
+            if(H5D__layout_idx_type_test(did, &idx_type) < 0) FAIL_STACK_ERROR
+
+            /* Chunk index type depends on whether we are using the latest version of the format */
+            if(low == H5F_LIBVER_LATEST) {
+                if(idx_type != H5D_CHUNK_IDX_SINGLE)
+                    FAIL_PUTS_ERROR("should be using Single Chunk indexing");
+            } else {
+                if(idx_type != H5D_CHUNK_IDX_BTREE)
+                    FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+            } /* end else */
+
+            /* Write into dataset */
+            if(H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, sid, H5P_DEFAULT, t_wbuf) < 0) TEST_ERROR;
+
+            /* Closing */
+            if(H5Dclose(did) < 0) FAIL_STACK_ERROR
+            if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+
+            /* Open the first dataset */
+            if((did_max = H5Dopen2(fid, DSET_SINGLE_MAX, H5P_DEFAULT)) < 0) TEST_ERROR;
+
+            /* Read from dataset */
+            if(H5Dread(did_max, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0) TEST_ERROR;
+
+            /* Verify that written and read data are the same */
+            for(i = 0; i < (DSET_DIM1 * DSET_DIM2); i++)
+                if(rbuf[i] != wbuf[i]){
+                    printf("    Line %d: Incorrect value, wbuf[%u]=%d, rbuf[%u]=%d\n",
+                        __LINE__,(unsigned)i,wbuf[i],(unsigned)i,rbuf[i]);
+                    TEST_ERROR;
+                } /* end if */
+
+            /* Closing */
+            if(H5Dclose(did_max) < 0) FAIL_STACK_ERROR
+
+            /* Open the second dataset */
+            if((did = H5Dopen2(fid, DSET_SINGLE_NOMAX, H5P_DEFAULT)) < 0) TEST_ERROR;
+
+            HDmemset(rbuf, 0, sizeof(rbuf));
+
+            /* Read from dataset */
+            if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, t_rbuf) < 0) TEST_ERROR;
+
+            /* Verify that written and read data are the same */
+            for(i = 0; i < (50* 100); i++)
+                if(t_rbuf[i] != t_wbuf[i]){
+                    printf("    Line %d: Incorrect value, t_wbuf[%u]=%d, t_rbuf[%u]=%d\n",
+                        __LINE__,(unsigned)i,t_wbuf[i],(unsigned)i,t_rbuf[i]);
+                    TEST_ERROR;
+                } /* end if */
+
+            /* Closing */
+            if(H5Dclose(did) < 0) FAIL_STACK_ERROR
+
+            /* Delete datasets */
+            if(H5Ldelete(fid, DSET_SINGLE_NOMAX, H5P_DEFAULT) < 0) FAIL_STACK_ERROR
+            if(H5Ldelete(fid, DSET_SINGLE_MAX, H5P_DEFAULT) < 0) FAIL_STACK_ERROR
+
+            /* Close everything */
+            if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
+
+            /* Get the size of the file */
+            if((file_size = h5_get_file_size(filename, fapl)) < 0)
+                TEST_ERROR
+
+            /* Verify the file is correct size */
+            if(file_size != empty_size)
+                TEST_ERROR
+
+        } /* end for */
+#ifdef H5_HAVE_FILTER_DEFLATE
+    } /* end for */
+#endif /* H5_HAVE_FILTER_DEFLATE */
+
+    PASSED();
+    return 0;
+
+error:
+    H5E_BEGIN_TRY {
+        H5Pclose(dcpl);
+        H5Pclose(t_dcpl);
+        H5Dclose(did);
+        H5Dclose(did_max);
+        H5Sclose(sid);
+        H5Sclose(sid_max);
+        H5Fclose(fid);
+    } H5E_END_TRY;
+    return -1;
+} /* end test_single_chunk() */
+
 /*-------------------------------------------------------------------------
  *
@@ -11493,7 +11736,7 @@ dls_01_main( void )
 {
     TESTING("Testing DLS bugfix 1");
 
-    if ( NULL == h5_fixname(FILENAME[17], H5P_DEFAULT, filename,
+    if ( NULL == h5_fixname(FILENAME[19], H5P_DEFAULT, filename,
                             sizeof(filename)) )
         TEST_ERROR
 
@@ -11685,6 +11928,7 @@ main(void)
         nerrors += (test_fixed_array(my_fapl) < 0 ? 1 : 0);
         nerrors += (test_idx_compatible() < 0 ? 1 : 0);
         nerrors += (test_unfiltered_edge_chunks(my_fapl) < 0 ? 1 : 0);
+        nerrors += (test_single_chunk(my_fapl) < 0 ? 1 : 0);
         nerrors += (test_large_chunk_shrink(my_fapl) < 0 ? 1 : 0);
         nerrors += (test_zero_dim_dset(my_fapl) < 0 ? 1 : 0);
         nerrors += (test_storage_size(my_fapl) < 0 ? 1 : 0);
diff --git a/test/objcopy.c b/test/objcopy.c
index 19caa52..229284d 100644
--- a/test/objcopy.c
+++ b/test/objcopy.c
@@ -2712,7 +2712,7 @@ test_copy_dataset_chunked(hid_t fcpl_src, hid_t fcpl_dst, hid_t src_fapl, hid_t
     if((did2 = H5Dopen2(fid_dst, NAME_DATASET_CHUNKED_SINGLE, H5P_DEFAULT)) < 0) TEST_ERROR
 
     /* Check if the array index type is correct */
-    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_FARRAY, H5D_CHUNK_IDX_BTREE) != TRUE)
+    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_SINGLE, H5D_CHUNK_IDX_BTREE) != TRUE)
         TEST_ERROR
 
     /* Check if the datasets are equal */
@@ -2770,7 +2770,7 @@ test_copy_dataset_chunked(hid_t fcpl_src, hid_t fcpl_dst, hid_t src_fapl, hid_t
     if((did2 = H5Dopen2(fid_dst, NAME_DATASET_CHUNKED2_SINGLE, H5P_DEFAULT)) < 0) TEST_ERROR
 
     /* Check if the array index type is correct */
-    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_FARRAY, H5D_CHUNK_IDX_BTREE) != TRUE)
+    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_SINGLE, H5D_CHUNK_IDX_BTREE) != TRUE)
         TEST_ERROR
 
     /* Check if the datasets are equal */
@@ -2789,7 +2789,7 @@ test_copy_dataset_chunked(hid_t fcpl_src, hid_t fcpl_dst, hid_t src_fapl, hid_t
     if((did2 = H5Dopen2(fid_dst, NAME_DATASET_CHUNKED3_SINGLE, H5P_DEFAULT)) < 0) TEST_ERROR
 
     /* Check if the array index type is correct */
-    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_FARRAY, H5D_CHUNK_IDX_BTREE) != TRUE)
+    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_SINGLE, H5D_CHUNK_IDX_BTREE) != TRUE)
         TEST_ERROR
 
     /* Check if the datasets are equal */
@@ -2808,7 +2808,7 @@ test_copy_dataset_chunked(hid_t fcpl_src, hid_t fcpl_dst, hid_t src_fapl, hid_t
     if((did2 = H5Dopen2(fid_dst, NAME_DATASET_CHUNKED4_SINGLE, H5P_DEFAULT)) < 0) TEST_ERROR
 
     /* Check if the array index type is correct */
-    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_FARRAY, H5D_CHUNK_IDX_BTREE) != TRUE)
+    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_SINGLE, H5D_CHUNK_IDX_BTREE) != TRUE)
         TEST_ERROR
 
     /* Check if the datasets are equal */
@@ -3108,7 +3108,7 @@ test_copy_dataset_chunked_empty(hid_t fcpl_src, hid_t fcpl_dst, hid_t src_fapl,
     if((did2 = H5Dopen2(fid_dst, NAME_DATASET_CHUNKED_SINGLE, H5P_DEFAULT)) < 0) TEST_ERROR
 
     /* Check if the array index type is correct */
-    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_FARRAY, H5D_CHUNK_IDX_BTREE) != TRUE)
+    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_SINGLE, H5D_CHUNK_IDX_BTREE) != TRUE)
         TEST_ERROR
 
     /* Check if the datasets are equal */
@@ -3146,7 +3146,7 @@ test_copy_dataset_chunked_empty(hid_t fcpl_src, hid_t fcpl_dst, hid_t src_fapl,
     if((did2 = H5Dopen2(fid_dst, NAME_DATASET_CHUNKED2_SINGLE, H5P_DEFAULT)) < 0) TEST_ERROR
 
     /* Check if the array index type is correct */
-    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_FARRAY, H5D_CHUNK_IDX_BTREE) != TRUE)
+    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_SINGLE, H5D_CHUNK_IDX_BTREE) != TRUE)
         TEST_ERROR
 
     /* Check if the datasets are equal */
@@ -3186,7 +3186,7 @@ test_copy_dataset_chunked_empty(hid_t fcpl_src, hid_t fcpl_dst, hid_t src_fapl,
     if((did2 = H5Dopen2(fid_dst, NAME_DATASET_CHUNKED3_SINGLE, H5P_DEFAULT)) < 0) TEST_ERROR
 
     /* Check if the array index type is correct */
-    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_FARRAY, H5D_CHUNK_IDX_BTREE) != TRUE)
+    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_SINGLE, H5D_CHUNK_IDX_BTREE) != TRUE)
         TEST_ERROR
 
     /* Check if the datasets are equal */
@@ -3205,7 +3205,7 @@ test_copy_dataset_chunked_empty(hid_t fcpl_src, hid_t fcpl_dst, hid_t src_fapl,
     if((did2 = H5Dopen2(fid_dst, NAME_DATASET_CHUNKED4_SINGLE, H5P_DEFAULT)) < 0) TEST_ERROR
 
     /* Check if the array index type is correct */
-    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_FARRAY, H5D_CHUNK_IDX_BTREE) != TRUE)
+    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_SINGLE, H5D_CHUNK_IDX_BTREE) != TRUE)
         TEST_ERROR
 
     /* Check if the datasets are equal */
@@ -3807,7 +3807,7 @@ test_copy_dataset_compressed(hid_t fcpl_src, hid_t fcpl_dst, hid_t src_fapl, hid
 
     /* open the copied dataset NAME_DATASET_CHUNKED2_SINGLE at destination */
     if((did2 = H5Dopen2(fid_dst, NAME_DATASET_CHUNKED2_SINGLE, H5P_DEFAULT)) < 0) TEST_ERROR
 
-    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_FARRAY, H5D_CHUNK_IDX_BTREE) != TRUE)
+    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_SINGLE, H5D_CHUNK_IDX_BTREE) != TRUE)
         TEST_ERROR
 
     /* Check if the datasets are equal */
@@ -3825,7 +3825,7 @@ test_copy_dataset_compressed(hid_t fcpl_src, hid_t fcpl_dst, hid_t src_fapl, hid
 
     /* open the copied dataset NAME_DATASET_CHUNKED3_SINGLE at destination */
     if((did2 = H5Dopen2(fid_dst, NAME_DATASET_CHUNKED3_SINGLE, H5P_DEFAULT)) < 0) TEST_ERROR
 
-    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_FARRAY, H5D_CHUNK_IDX_BTREE) != TRUE)
+    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_SINGLE, H5D_CHUNK_IDX_BTREE) != TRUE)
         TEST_ERROR
 
     /* Check if the datasets are equal */
@@ -3843,7 +3843,7 @@ test_copy_dataset_compressed(hid_t fcpl_src, hid_t fcpl_dst, hid_t src_fapl, hid
 
     /* open the copied dataset NAME_DATASET_CHUNKED4_SINGLE at destination */
     if((did2 = H5Dopen2(fid_dst, NAME_DATASET_CHUNKED4_SINGLE, H5P_DEFAULT)) < 0) TEST_ERROR
 
-    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_FARRAY, H5D_CHUNK_IDX_BTREE) != TRUE)
+    if(compare_idx_type(src_fapl, did2, H5D_CHUNK_IDX_SINGLE, H5D_CHUNK_IDX_BTREE) != TRUE)
         TEST_ERROR
 
     /* Check if the datasets are equal */
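
The condition the new test exercises -- a chunked dataset whose chunk dimensions equal its fixed current/maximum dimensions, created with the latest file-format bounds -- can be reproduced from application code roughly as follows. This is a minimal sketch, not part of the commit; the file name and dataset name are invented for illustration, and error checking is omitted for brevity.

/*
 * Illustrative sketch (assumed names, no error checking): create a dataset
 * that the library can index with a single chunk when the latest format
 * is requested.
 */
#include "hdf5.h"

int
main(void)
{
    hsize_t dims[2] = {50, 100};    /* current dims == max dims == chunk dims */
    hid_t   fapl, fid, dcpl, sid, did;
    int     buf[50 * 100];
    size_t  i;

    for(i = 0; i < 50 * 100; i++)
        buf[i] = (int)i;

    /* Request the latest file format so the newer chunk index types can be used */
    fapl = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);

    fid = H5Fcreate("single_chunk_example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);

    /* One chunk covers the whole dataset */
    dcpl = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl, 2, dims);

    /* Fixed-size dataspace: current dimensions equal maximum dimensions */
    sid = H5Screate_simple(2, dims, dims);
    did = H5Dcreate2(fid, "single_chunk_dset", H5T_NATIVE_INT, sid,
                     H5P_DEFAULT, dcpl, H5P_DEFAULT);

    H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

    H5Dclose(did);
    H5Sclose(sid);
    H5Pclose(dcpl);
    H5Fclose(fid);
    H5Pclose(fapl);

    return 0;
}

With an older-format FAPL (no H5Pset_libver_bounds call), the same creation parameters fall back to the v1 B-tree chunk index, which is exactly the old-format branch the test checks.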