Diffstat (limited to 'testpar')
-rw-r--r--  testpar/t_dset.c        | 206
-rw-r--r--  testpar/t_filter_read.c | 116
2 files changed, 186 insertions(+), 136 deletions(-)
diff --git a/testpar/t_dset.c b/testpar/t_dset.c
index 2e1005c..b7f2fc0 100644
--- a/testpar/t_dset.c
+++ b/testpar/t_dset.c
@@ -2515,6 +2515,8 @@ compress_readAll(void)
     int rank=1;                 /* Dataspace rank */
     hsize_t dim=dim0;           /* Dataspace dimensions */
     unsigned u;                 /* Local index variable */
+    unsigned chunk_opts;        /* Chunk options */
+    unsigned disable_partial_chunk_filters;     /* Whether filters are disabled on partial chunks */
     DATATYPE *data_read = NULL; /* data buffer */
     DATATYPE *data_orig = NULL; /* expected data buffer */
     const char *filename;
@@ -2541,116 +2543,132 @@ compress_readAll(void)
     for(u=0; u<dim;u++)
         data_orig[u]=u;

-    /* Process zero creates the file with a compressed, chunked dataset */
-    if(mpi_rank==0) {
-        hsize_t chunk_dim;      /* Chunk dimensions */
-
-        /* Create the file */
-        fid = H5Fcreate(h5_rmprefix(filename), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
-        VRFY((fid > 0), "H5Fcreate succeeded");
-
-        /* Create property list for chunking and compression */
-        dcpl = H5Pcreate(H5P_DATASET_CREATE);
-        VRFY((dcpl > 0), "H5Pcreate succeeded");
-
-        ret = H5Pset_layout(dcpl, H5D_CHUNKED);
-        VRFY((ret >= 0), "H5Pset_layout succeeded");
-
-        /* Use eight chunks */
-        chunk_dim = dim / 8;
-        ret = H5Pset_chunk(dcpl, rank, &chunk_dim);
-        VRFY((ret >= 0), "H5Pset_chunk succeeded");
+    /* Run test both with and without filters disabled on partial chunks */
+    for(disable_partial_chunk_filters = 0; disable_partial_chunk_filters <= 1;
+            disable_partial_chunk_filters++) {
+        /* Process zero creates the file with a compressed, chunked dataset */
+        if(mpi_rank==0) {
+            hsize_t chunk_dim;  /* Chunk dimensions */
+
+            /* Create the file */
+            fid = H5Fcreate(h5_rmprefix(filename), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+            VRFY((fid > 0), "H5Fcreate succeeded");
+
+            /* Create property list for chunking and compression */
+            dcpl = H5Pcreate(H5P_DATASET_CREATE);
+            VRFY((dcpl > 0), "H5Pcreate succeeded");
+
+            ret = H5Pset_layout(dcpl, H5D_CHUNKED);
+            VRFY((ret >= 0), "H5Pset_layout succeeded");
+
+            /* Use eight chunks */
+            chunk_dim = dim / 8;
+            ret = H5Pset_chunk(dcpl, rank, &chunk_dim);
+            VRFY((ret >= 0), "H5Pset_chunk succeeded");
+
+            /* Set chunk options appropriately */
+            if(disable_partial_chunk_filters) {
+                ret = H5Pget_chunk_opts(dcpl, &chunk_opts);
+                VRFY((ret>=0),"H5Pget_chunk_opts succeeded");
+
+                chunk_opts |= H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS;
+
+                ret = H5Pset_chunk_opts(dcpl, chunk_opts);
+                VRFY((ret>=0),"H5Pset_chunk_opts succeeded");
+            } /* end if */
+
+            ret = H5Pset_deflate(dcpl, 9);
+            VRFY((ret >= 0), "H5Pset_deflate succeeded");
+
+            /* Create dataspace */
+            dataspace = H5Screate_simple(rank, &dim, NULL);
+            VRFY((dataspace > 0), "H5Screate_simple succeeded");
+
+            /* Create dataset */
+            dataset = H5Dcreate2(fid, "compressed_data", H5T_NATIVE_INT, dataspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+            VRFY((dataset > 0), "H5Dcreate2 succeeded");
+
+            /* Write compressed data */
+            ret = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data_orig);
+            VRFY((ret >= 0), "H5Dwrite succeeded");
+
+            /* Close objects */
+            ret = H5Pclose(dcpl);
+            VRFY((ret >= 0), "H5Pclose succeeded");
+            ret = H5Sclose(dataspace);
+            VRFY((ret >= 0), "H5Sclose succeeded");
+            ret = H5Dclose(dataset);
+            VRFY((ret >= 0), "H5Dclose succeeded");
+            ret = H5Fclose(fid);
+            VRFY((ret >= 0), "H5Fclose succeeded");
+        }

-        ret = H5Pset_deflate(dcpl, 9);
-        VRFY((ret >= 0), "H5Pset_deflate succeeded");
+        /* Wait for file to be created */
+        MPI_Barrier(comm);

-        /* Create dataspace */
-        dataspace = H5Screate_simple(rank, &dim, NULL);
-        VRFY((dataspace > 0), "H5Screate_simple succeeded");
+        /* -------------------
+         * OPEN AN HDF5 FILE
+         * -------------------*/

-        /* Create dataset */
-        dataset = H5Dcreate2(fid, "compressed_data", H5T_NATIVE_INT, dataspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
-        VRFY((dataset > 0), "H5Dcreate2 succeeded");
+        /* setup file access template */
+        acc_tpl = create_faccess_plist(comm, info, facc_type);
+        VRFY((acc_tpl >= 0), "");

-        /* Write compressed data */
-        ret = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data_orig);
-        VRFY((ret >= 0), "H5Dwrite succeeded");
+        /* open the file collectively */
+        fid=H5Fopen(filename,H5F_ACC_RDWR,acc_tpl);
+        VRFY((fid > 0), "H5Fopen succeeded");

-        /* Close objects */
-        ret = H5Pclose(dcpl);
+        /* Release file-access template */
+        ret = H5Pclose(acc_tpl);
         VRFY((ret >= 0), "H5Pclose succeeded");
-        ret = H5Sclose(dataspace);
-        VRFY((ret >= 0), "H5Sclose succeeded");
-        ret = H5Dclose(dataset);
-        VRFY((ret >= 0), "H5Dclose succeeded");
-        ret = H5Fclose(fid);
-        VRFY((ret >= 0), "H5Fclose succeeded");
-    }
-    /* Wait for file to be created */
-    MPI_Barrier(comm);
-
-    /* -------------------
-     * OPEN AN HDF5 FILE
-     * -------------------*/
-    /* setup file access template */
-    acc_tpl = create_faccess_plist(comm, info, facc_type);
-    VRFY((acc_tpl >= 0), "");
+        /* Open dataset with compressed chunks */
+        dataset = H5Dopen2(fid, "compressed_data", H5P_DEFAULT);
+        VRFY((dataset > 0), "H5Dopen2 succeeded");

-    /* open the file collectively */
-    fid=H5Fopen(filename,H5F_ACC_RDWR,acc_tpl);
-    VRFY((fid > 0), "H5Fopen succeeded");
+        /* Try reading & writing data */
+        if(dataset>0) {
+            /* Create dataset transfer property list */
+            xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+            VRFY((xfer_plist > 0), "H5Pcreate succeeded");

-    /* Release file-access template */
-    ret = H5Pclose(acc_tpl);
-    VRFY((ret >= 0), "H5Pclose succeeded");
-
-
-    /* Open dataset with compressed chunks */
-    dataset = H5Dopen2(fid, "compressed_data", H5P_DEFAULT);
-    VRFY((dataset > 0), "H5Dopen2 succeeded");
-
-    /* Try reading & writing data */
-    if(dataset>0) {
-        /* Create dataset transfer property list */
-        xfer_plist = H5Pcreate(H5P_DATASET_XFER);
-        VRFY((xfer_plist > 0), "H5Pcreate succeeded");
-
-        ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
-        VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
-        if(dxfer_coll_type == DXFER_INDEPENDENT_IO) {
-            ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist,H5FD_MPIO_INDIVIDUAL_IO);
-            VRFY((ret>= 0),"set independent IO collectively succeeded");
-        }
+            ret = H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+            VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+            if(dxfer_coll_type == DXFER_INDEPENDENT_IO) {
+                ret = H5Pset_dxpl_mpio_collective_opt(xfer_plist,H5FD_MPIO_INDIVIDUAL_IO);
+                VRFY((ret>= 0),"set independent IO collectively succeeded");
+            }

-        /* Try reading the data */
-        ret = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xfer_plist, data_read);
-        VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");
+            /* Try reading the data */
+            ret = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xfer_plist, data_read);
+            VRFY((ret >= 0), "H5Pset_dxpl_mpio succeeded");

-        /* Verify data read */
-        for(u=0; u<dim; u++)
-            if(data_orig[u]!=data_read[u]) {
-                printf("Line #%d: written!=retrieved: data_orig[%u]=%d, data_read[%u]=%d\n",__LINE__,
-                    (unsigned)u,data_orig[u],(unsigned)u,data_read[u]);
-                nerrors++;
-            }
+            /* Verify data read */
+            for(u=0; u<dim; u++)
+                if(data_orig[u]!=data_read[u]) {
+                    printf("Line #%d: written!=retrieved: data_orig[%u]=%d, data_read[%u]=%d\n",__LINE__,
+                        (unsigned)u,data_orig[u],(unsigned)u,data_read[u]);
+                    nerrors++;
+                }

-        /* Writing to the compressed, chunked dataset in parallel should fail */
-        H5E_BEGIN_TRY {
-            ret = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xfer_plist, data_read);
-        } H5E_END_TRY;
-        VRFY((ret < 0), "H5Dwrite failed");
+            /* Writing to the compressed, chunked dataset in parallel should fail */
+            H5E_BEGIN_TRY {
+                ret = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xfer_plist, data_read);
+            } H5E_END_TRY;
+            VRFY((ret < 0), "H5Dwrite failed");

-        ret = H5Pclose(xfer_plist);
-        VRFY((ret >= 0), "H5Pclose succeeded");
-        ret = H5Dclose(dataset);
-        VRFY((ret >= 0), "H5Dclose succeeded");
-    } /* end if */
+            ret = H5Pclose(xfer_plist);
+            VRFY((ret >= 0), "H5Pclose succeeded");
+            ret = H5Dclose(dataset);
+            VRFY((ret >= 0), "H5Dclose succeeded");
+        } /* end if */

-    ret = H5Fclose(fid);
-    VRFY((ret >= 0), "H5Fclose succeeded");
+        /* Close file */
+        ret = H5Fclose(fid);
+        VRFY((ret >= 0), "H5Fclose succeeded");
+    } /* end for */

     /* release data buffers */
     if(data_read) HDfree(data_read);
diff --git a/testpar/t_filter_read.c b/testpar/t_filter_read.c
index 5e1cd04..44f3f11 100644
--- a/testpar/t_filter_read.c
+++ b/testpar/t_filter_read.c
@@ -213,6 +213,8 @@ test_filter_read(void)
     hid_t dc;                   /* HDF5 IDs */
     const hsize_t chunk_size[2] = {CHUNK_DIM1, CHUNK_DIM2};  /* Chunk dimensions */
     hsize_t null_size;          /* Size of dataset without filters */
+    unsigned chunk_opts;        /* Chunk options */
+    unsigned disable_partial_chunk_filters;     /* Whether filters are disabled on partial chunks */
     herr_t hrc;
     const char *filename;
     hsize_t fletcher32_size;    /* Size of dataset with Fletcher32 checksum */
@@ -254,74 +256,104 @@ test_filter_read(void)
     hrc = H5Pclose (dc);
     VRFY(hrc>=0,"H5Pclose");

-    /*----------------------------------------------------------
-     * STEP 1: Test Fletcher32 Checksum by itself.
-     *----------------------------------------------------------
-     */
-
-    dc = H5Pcreate(H5P_DATASET_CREATE);
-    VRFY(dc>=0,"H5Pset_filter");
+    /* Run steps 1-3 both with and without filters disabled on partial chunks */
+    for(disable_partial_chunk_filters = 0; disable_partial_chunk_filters <= 1;
+            disable_partial_chunk_filters++) {
+        /* Set chunk options appropriately */
+        dc = H5Pcreate(H5P_DATASET_CREATE);
+        VRFY(dc>=0,"H5Pcreate");

-    hrc = H5Pset_chunk (dc, 2, chunk_size);
-    VRFY(hrc>=0,"H5Pset_filter");
+        hrc = H5Pset_chunk (dc, 2, chunk_size);
+        VRFY(hrc>=0,"H5Pset_filter");

-    hrc = H5Pset_filter (dc,H5Z_FILTER_FLETCHER32,0,0,NULL);
-    VRFY(hrc>=0,"H5Pset_filter");
+        hrc = H5Pget_chunk_opts(dc, &chunk_opts);
+        VRFY(hrc>=0,"H5Pget_chunk_opts");

-    filter_read_internal(filename,dc,&fletcher32_size);
-    VRFY(fletcher32_size > null_size,"Size after checksumming is incorrect.");
+        if(disable_partial_chunk_filters)
+            chunk_opts |= H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS;

-    /* Clean up objects used for this test */
-    hrc = H5Pclose (dc);
-    VRFY(hrc>=0, "H5Pclose");
+        hrc = H5Pclose (dc);
+        VRFY(hrc>=0,"H5Pclose");
+        /*----------------------------------------------------------
+         * STEP 1: Test Fletcher32 Checksum by itself.
+         *----------------------------------------------------------
+         */
+#ifdef H5_HAVE_FILTER_FLETCHER32

-    /*----------------------------------------------------------
-     * STEP 2: Test deflation by itself.
-     *----------------------------------------------------------
-     */
-#ifdef H5_HAVE_FILTER_DEFLATE
+        dc = H5Pcreate(H5P_DATASET_CREATE);
+        VRFY(dc>=0,"H5Pset_filter");

-    dc = H5Pcreate(H5P_DATASET_CREATE);
-    VRFY(dc>=0, "H5Pcreate");
+        hrc = H5Pset_chunk (dc, 2, chunk_size);
+        VRFY(hrc>=0,"H5Pset_filter");

-    hrc = H5Pset_chunk (dc, 2, chunk_size);
-    VRFY(hrc>=0, "H5Pset_chunk");
+        hrc = H5Pset_chunk_opts (dc, chunk_opts);
+        VRFY(hrc>=0,"H5Pset_chunk_opts");

-    hrc = H5Pset_deflate (dc, 6);
-    VRFY(hrc>=0, "H5Pset_deflate");
+        hrc = H5Pset_filter (dc,H5Z_FILTER_FLETCHER32,0,0,NULL);
+        VRFY(hrc>=0,"H5Pset_filter");

-    filter_read_internal(filename,dc,&deflate_size);
+        filter_read_internal(filename,dc,&fletcher32_size);
+        VRFY(fletcher32_size > null_size,"Size after checksumming is incorrect.");

-    /* Clean up objects used for this test */
-    hrc = H5Pclose (dc);
-    VRFY(hrc>=0, "H5Pclose");
+        /* Clean up objects used for this test */
+        hrc = H5Pclose (dc);
+        VRFY(hrc>=0, "H5Pclose");

-#endif /* H5_HAVE_FILTER_DEFLATE */
+#endif /* H5_HAVE_FILTER_FLETCHER32 */
+        /*----------------------------------------------------------
+         * STEP 2: Test deflation by itself.
+         *----------------------------------------------------------
+         */
+#ifdef H5_HAVE_FILTER_DEFLATE

-    /*----------------------------------------------------------
-     * STEP 3: Test szip compression by itself.
-     *----------------------------------------------------------
-     */
-#ifdef H5_HAVE_FILTER_SZIP
-    if(h5_szip_can_encode() == 1) {
         dc = H5Pcreate(H5P_DATASET_CREATE);
         VRFY(dc>=0, "H5Pcreate");

         hrc = H5Pset_chunk (dc, 2, chunk_size);
         VRFY(hrc>=0, "H5Pset_chunk");

-        hrc = H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block);
-        VRFY(hrc>=0, "H5Pset_szip");
+        hrc = H5Pset_chunk_opts (dc, chunk_opts);
+        VRFY(hrc>=0,"H5Pset_chunk_opts");
+
+        hrc = H5Pset_deflate (dc, 6);
+        VRFY(hrc>=0, "H5Pset_deflate");

-        filter_read_internal(filename,dc,&szip_size);
+        filter_read_internal(filename,dc,&deflate_size);

         /* Clean up objects used for this test */
         hrc = H5Pclose (dc);
         VRFY(hrc>=0, "H5Pclose");
-    }
+
+#endif /* H5_HAVE_FILTER_DEFLATE */
+
+        /*----------------------------------------------------------
+         * STEP 3: Test szip compression by itself.
+         *----------------------------------------------------------
+         */
+#ifdef H5_HAVE_FILTER_SZIP
+        if(h5_szip_can_encode() == 1) {
+            dc = H5Pcreate(H5P_DATASET_CREATE);
+            VRFY(dc>=0, "H5Pcreate");
+
+            hrc = H5Pset_chunk (dc, 2, chunk_size);
+            VRFY(hrc>=0, "H5Pset_chunk");
+
+            hrc = H5Pset_chunk_opts (dc, chunk_opts);
+            VRFY(hrc>=0,"H5Pset_chunk_opts");
+
+            hrc = H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block);
+            VRFY(hrc>=0, "H5Pset_szip");
+
+            filter_read_internal(filename,dc,&szip_size);
+
+            /* Clean up objects used for this test */
+            hrc = H5Pclose (dc);
+            VRFY(hrc>=0, "H5Pclose");
+        }
 #endif /* H5_HAVE_FILTER_SZIP */
+    } /* end for */

     /*----------------------------------------------------------
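Background for the diff above: both tests now exercise the H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS chunk option, which tells the library to skip the I/O filter pipeline for edge chunks that only partially cover the dataset extent. The option is read from and written to a dataset creation property list with H5Pget_chunk_opts()/H5Pset_chunk_opts(). The code below is a minimal serial (non-MPI) sketch of that DCPL setup, not code taken from the tests themselves; the file name, dataset name, and extents are illustrative, error checking is omitted for brevity, and an HDF5 build of 1.10 or later with the deflate filter is assumed.

/* Minimal serial sketch (illustrative, not from the test suite): create a
 * chunked, deflate-compressed dataset whose partial edge chunk is left
 * unfiltered via H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS. */
#include "hdf5.h"

int
main(void)
{
    hsize_t  dim = 100, chunk_dim = 8;  /* 100 is not a multiple of 8, so the last chunk is partial */
    int      data[100];
    unsigned opts;
    unsigned u;
    hid_t    fid, sid, dcpl, did;

    for(u = 0; u < 100; u++)
        data[u] = (int)u;

    fid  = H5Fcreate("partial_chunk_example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    sid  = H5Screate_simple(1, &dim, NULL);
    dcpl = H5Pcreate(H5P_DATASET_CREATE);

    H5Pset_chunk(dcpl, 1, &chunk_dim);

    /* Read the current chunk options, OR in the flag, and write them back,
     * mirroring what the patched tests do inside their new loops. */
    H5Pget_chunk_opts(dcpl, &opts);
    opts |= H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS;
    H5Pset_chunk_opts(dcpl, opts);

    H5Pset_deflate(dcpl, 9);

    did = H5Dcreate2(fid, "compressed_data", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
    H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);

    H5Dclose(did);
    H5Pclose(dcpl);
    H5Sclose(sid);
    H5Fclose(fid);

    return 0;
}

With the flag set, the trailing chunk that only partially covers the dataset is stored unfiltered, which is exactly the extra case the new loops in compress_readAll() and test_filter_read() cover in addition to the default (all chunks filtered) behavior.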