author | Vailin Choi <vchoi@jam.ad.hdfgroup.org> | 2017-04-25 23:08:53 (GMT) |
---|---|---|
committer | Vailin Choi <vchoi@jam.ad.hdfgroup.org> | 2017-04-25 23:08:53 (GMT) |
commit | cb8e82a53d91067b16deba7ebc76307a336b0f02 (patch) | |
tree | 88fb7095226155a86b9e8c63c6c621b22b4fe8d5 /hl/test | |
parent | 9c30b393163cfaf069aa166ce261034b3010394d (diff) | |
download | hdf5-cb8e82a53d91067b16deba7ebc76307a336b0f02.zip hdf5-cb8e82a53d91067b16deba7ebc76307a336b0f02.tar.gz hdf5-cb8e82a53d91067b16deba7ebc76307a336b0f02.tar.bz2 |
Incorporate patch from GE Healthcare (HDFFV-9934)
This is the same patch that was already integrated into the 1.8 branch (pull request #387).
Tested on moohan, ostrich, platypus, emu, osx1010test, quail, kituo, mayll.
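For readers unfamiliar with the two calls under test, the round trip exercised by the updated tests looks roughly like the sketch below. This is an illustration only, not code from the patch: `chunk_roundtrip`, the dataset name `chunked_dset`, and the 16x16 chunk size are assumptions; the patch's own tests are in hl/test/test_dset_opt.c as shown in the diff.

```c
#include "hdf5.h"
#include "hdf5_hl.h"    /* H5DOwrite_chunk / H5DOread_chunk */

#define CNX 16          /* illustrative chunk (and dataset) dimensions */
#define CNY 16

/* Minimal round trip: write one raw chunk, read it back unfiltered.
 * Assumes file_id is an open HDF5 file; the dataset has no filters. */
static int chunk_roundtrip(hid_t file_id)
{
    hsize_t  dims[2]       = {CNX, CNY};
    hsize_t  chunk_dims[2] = {CNX, CNY};
    hsize_t  offset[2]     = {0, 0};        /* must lie on a chunk boundary */
    int      wbuf[CNX][CNY], rbuf[CNX][CNY];
    unsigned filter_mask   = 0;             /* no filters skipped on write  */
    unsigned read_mask     = 0;             /* filled in by H5DOread_chunk  */
    hid_t    space = -1, dcpl = -1, dset = -1;
    int      i, j, n = 0;

    for(i = 0; i < CNX; i++)
        for(j = 0; j < CNY; j++)
            wbuf[i][j] = n++;

    if((space = H5Screate_simple(2, dims, NULL)) < 0) goto error;
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0) goto error;
    if((dset = H5Dcreate2(file_id, "chunked_dset", H5T_NATIVE_INT, space,
                          H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) goto error;

    /* Write the chunk bytes directly, bypassing the filter pipeline and
     * datatype conversion */
    if(H5DOwrite_chunk(dset, H5P_DEFAULT, filter_mask, offset,
                       sizeof(wbuf), wbuf) < 0) goto error;

    /* Read the same chunk back in its on-disk form */
    if(H5DOread_chunk(dset, H5P_DEFAULT, offset, &read_mask, rbuf) < 0) goto error;

    /* With no filters in the pipeline, masks and data should match */
    if(read_mask != filter_mask) goto error;
    for(i = 0; i < CNX; i++)
        for(j = 0; j < CNY; j++)
            if(wbuf[i][j] != rbuf[i][j]) goto error;

    H5Dclose(dset); H5Pclose(dcpl); H5Sclose(space);
    return 0;

error:
    if(dset >= 0) H5Dclose(dset);
    if(dcpl >= 0) H5Pclose(dcpl);
    if(space >= 0) H5Sclose(space);
    return 1;
}
```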
Diffstat (limited to 'hl/test')
-rw-r--r-- | hl/test/test_dset_opt.c | 461 |
1 file changed, 377 insertions, 84 deletions
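Much of the new test code in the diff below reads a deflate-compressed chunk in its raw on-disk form and then inflates it with zlib before comparing values. Stripped of the test harness, that read-back pattern looks roughly like this sketch (illustrative only; `read_compressed_chunk`, `dset`, `offset`, and `expected` are placeholder names, not identifiers from the patch):

```c
#include <stdlib.h>
#include <string.h>
#include <zlib.h>
#include "hdf5.h"
#include "hdf5_hl.h"

/* Read-back path for a deflate-compressed chunk: query the stored chunk
 * size, read the raw bytes with H5DOread_chunk, then inflate with zlib
 * and compare against the expected (uncompressed) values. */
static int read_compressed_chunk(hid_t dset, const hsize_t offset[2],
                                 const void *expected, size_t expected_nbytes)
{
    hsize_t  chunk_nbytes = 0;      /* size of the chunk as stored on disk    */
    unsigned read_mask    = 0;      /* filters skipped when chunk was written */
    void    *raw          = NULL;   /* raw (compressed) chunk bytes           */
    void    *inflated     = NULL;   /* decompressed chunk                     */
    uLongf   dst_nbytes   = (uLongf)expected_nbytes;
    int      zret, result = 1;

    if(H5Dget_chunk_storage_size(dset, offset, &chunk_nbytes) < 0) goto done;
    if(NULL == (raw = malloc((size_t)chunk_nbytes))) goto done;
    if(NULL == (inflated = malloc(expected_nbytes))) goto done;

    /* Raw chunk read: no filter pipeline, no datatype conversion */
    if(H5DOread_chunk(dset, H5P_DEFAULT, offset, &read_mask, raw) < 0) goto done;

    /* Undo the deflate filter by hand so the values can be compared;
     * Z_BUF_ERROR / Z_MEM_ERROR / Z_DATA_ERROR all end the check */
    zret = uncompress((Bytef *)inflated, &dst_nbytes,
                      (const Bytef *)raw, (uLong)chunk_nbytes);
    if(zret != Z_OK) goto done;

    result = memcmp(inflated, expected, expected_nbytes) ? 1 : 0;

done:
    free(raw);
    free(inflated);
    return result;    /* 0 on success, 1 on failure */
}
```

Because H5DOread_chunk bypasses the filter pipeline, the test has to apply zlib's uncompress() itself; the filter mask returned through the output parameter records which filters, if any, were skipped when the chunk was written.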
diff --git a/hl/test/test_dset_opt.c b/hl/test/test_dset_opt.c index 03c467a..d3a6b5c 100644 --- a/hl/test/test_dset_opt.c +++ b/hl/test/test_dset_opt.c @@ -76,11 +76,10 @@ const H5Z_class2_t H5Z_BOGUS2[1] = {{ /*------------------------------------------------------------------------- * Function: test_direct_chunk_write * - * Purpose: Test the basic functionality of H5DOwrite_chunk + * Purpose: Test the basic functionality of H5DOwrite_chunk/H5DOread_chunk * * Return: Success: 0 - * - * Failure: 1 + * Failure: 1 * * Programmer: Raymond Lu * 30 November 2012 @@ -103,24 +102,31 @@ test_direct_chunk_write (hid_t file) int i, j, n; unsigned filter_mask = 0; + unsigned read_filter_mask = 0; int direct_buf[CHUNK_NX][CHUNK_NY]; int check_chunk[CHUNK_NX][CHUNK_NY]; hsize_t offset[2] = {0, 0}; size_t buf_size = CHUNK_NX*CHUNK_NY*sizeof(int); const Bytef *z_src = (const Bytef*)(direct_buf); - Bytef *z_dst; /*destination buffer */ + Bytef *z_dst; /*destination buffer */ uLongf z_dst_nbytes = (uLongf)DEFLATE_SIZE_ADJUST(buf_size); uLong z_src_nbytes = (uLong)buf_size; - int aggression = 9; /* Compression aggression setting */ - void *outbuf = NULL; /* Pointer to new buffer */ + int aggression = 9; /* Compression aggression setting */ + void *outbuf = NULL; /* Pointer to new buffer */ + + /* For H5DOread_chunk() */ + void *readbuf = NULL; /* Buffer for reading data */ + const Bytef *pt_readbuf; /* Point to the buffer for data read */ + hsize_t read_chunk_nbytes; /* Size of chunk on disk */ + int read_dst_buf[CHUNK_NX][CHUNK_NY]; /* Buffer to hold un-compressed data */ hsize_t start[2]; /* Start of hyperslab */ hsize_t stride[2]; /* Stride of hyperslab */ hsize_t count[2]; /* Block count */ hsize_t block[2]; /* Block sizes */ - TESTING("basic functionality of H5DOwrite_chunk"); + TESTING("basic functionality of H5DOwrite_chunk/H5DOread_chunk"); /* * Create the data space with unlimited dimensions. @@ -151,26 +157,76 @@ test_direct_chunk_write (hid_t file) cparms, H5P_DEFAULT)) < 0) goto error; - /* Initialize the dataset */ - for(i = n = 0; i < NX; i++) - for(j = 0; j < NY; j++) - data[i][j] = n++; - if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0) goto error; + HDmemset(data, 0, sizeof(data)); + /* Initialize data for the first chunk */ + for(i = n = 0; i < CHUNK_NX; i++) + for(j = 0; j < CHUNK_NY; j++) + data[i][j] = n++; + /* - * Write the data for the dataset. It should stay in the chunk cache. - * It will be evicted from the cache by the H5DOwrite_chunk calls. - */ + * Write the data for the dataset. 
*/ if((status = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, data)) < 0) goto error; + if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0) + goto error; + + if(H5Dclose(dataset) < 0) + goto error; + + if((dataset = H5Dopen2(file, DATASETNAME1, H5P_DEFAULT)) < 0) + goto error; + + offset[0] = offset[1] = 0; + + /* Get the size of the compressed chunk */ + ret = H5Dget_chunk_storage_size(dataset, offset, &read_chunk_nbytes); + + readbuf = HDmalloc(read_chunk_nbytes); + pt_readbuf = (const Bytef *)readbuf; + + /* Test to use H5DOread_chunk() to read the chunk back */ + if((status = H5DOread_chunk(dataset, H5P_DEFAULT, offset, &read_filter_mask, readbuf)) < 0) + goto error; + + /* uncompress(Bytef *dest, uLongf *destLen, const Bytef *source, uLong sourceLen) */ + ret = uncompress((Bytef *)read_dst_buf, (uLongf *)&buf_size, pt_readbuf, (uLong)read_chunk_nbytes); + + /* Check for various zlib errors */ + if(Z_BUF_ERROR == ret) { + HDfprintf(stderr, "error: not enough room in output buffer"); + goto error; + } else if(Z_MEM_ERROR == ret) { + HDfprintf(stderr, "error: not enough memory"); + goto error; + } else if(Z_OK != ret) { + HDfprintf(stderr, "error: corrupted input data"); + goto error; + } + + /* Check that the values read are the same as the values written */ + for(i = 0; i < CHUNK_NX; i++) { + for(j = 0; j < CHUNK_NY; j++) { + if(data[i][j] != read_dst_buf[i][j]) { + printf(" 1. Read different values than written."); + printf(" At index %d,%d\n", i, j); + printf(" data=%d, read_dst_buf=%d\n", data[i][j], read_dst_buf[i][j]); + goto error; + } + } + } + + if(readbuf) + HDfree(readbuf); + /* Initialize data for one chunk */ for(i = n = 0; i < CHUNK_NX; i++) for(j = 0; j < CHUNK_NY; j++) - direct_buf[i][j] = n++; + direct_buf[i][j] = n++; /* Allocate output (compressed) buffer */ outbuf = HDmalloc(z_dst_nbytes); @@ -181,18 +237,18 @@ test_direct_chunk_write (hid_t file) /* Check for various zlib errors */ if(Z_BUF_ERROR == ret) { - fprintf(stderr, "overflow"); + HDfprintf(stderr, "overflow"); goto error; } else if(Z_MEM_ERROR == ret) { - fprintf(stderr, "deflate memory error"); + HDfprintf(stderr, "deflate memory error"); goto error; } else if(Z_OK != ret) { - fprintf(stderr, "other deflate error"); + HDfprintf(stderr, "other deflate error"); goto error; } - /* Write the compressed chunk data repeatedly to cover all the chunks in the - * dataset, using the direct writing function. */ + /* Write the compressed chunk data repeatedly to cover all the chunks in the + * dataset, using the direct writing function. 
*/ for(i=0; i<NX/CHUNK_NX; i++) { for(j=0; j<NY/CHUNK_NY; j++) { status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, z_dst_nbytes, outbuf); @@ -205,7 +261,7 @@ test_direct_chunk_write (hid_t file) if(outbuf) HDfree(outbuf); - if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0) + if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0) goto error; if(H5Dclose(dataset) < 0) @@ -214,6 +270,59 @@ test_direct_chunk_write (hid_t file) if((dataset = H5Dopen2(file, DATASETNAME1, H5P_DEFAULT)) < 0) goto error; + offset[0] = CHUNK_NX; + offset[1] = CHUNK_NY; + + /* Get the size of the compressed chunk */ + ret = H5Dget_chunk_storage_size(dataset, offset, &read_chunk_nbytes); + + if(read_chunk_nbytes != (hsize_t)z_dst_nbytes) { + HDfprintf(stderr, "Read/write chunk size not the same."); + goto error; + } + + readbuf = HDmalloc(read_chunk_nbytes); + pt_readbuf = (const Bytef *)readbuf; + + /* Test to use H5DOread_chunk() to read the chunk back */ + if((status = H5DOread_chunk(dataset, H5P_DEFAULT, offset, &read_filter_mask, readbuf)) < 0) + goto error; + + if(read_filter_mask != filter_mask) { + HDfprintf(stderr, " Read/write filter mask not the same."); + goto error; + } + + /* uncompress(Bytef *dest, uLongf *destLen, const Bytef *source, uLong sourceLen) */ + ret = uncompress((Bytef *)read_dst_buf, (uLongf *)&buf_size, pt_readbuf, (uLong)read_chunk_nbytes); + + /* Check for various zlib errors */ + if(Z_BUF_ERROR == ret) { + HDfprintf(stderr, "error: not enough room in output buffer"); + goto error; + } else if(Z_MEM_ERROR == ret) { + HDfprintf(stderr, "error: not enough memory"); + goto error; + } else if(Z_OK != ret) { + HDfprintf(stderr, "error: corrupted input data"); + goto error; + } + + /* Check that the values read are the same as the values written */ + for(i = 0; i < CHUNK_NX; i++) { + for(j = 0; j < CHUNK_NY; j++) { + if(direct_buf[i][j] != read_dst_buf[i][j]) { + printf(" 1. Read different values than written."); + printf(" At index %d,%d\n", i, j); + printf(" direct_buf=%d, read_dst_buf=%d\n", direct_buf[i][j], read_dst_buf[i][j]); + goto error; + } + } + } + + if(readbuf) + HDfree(readbuf); + /* * Select hyperslab for one chunk in the file */ @@ -224,7 +333,7 @@ test_direct_chunk_write (hid_t file) if((status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, stride, count, block)) < 0) goto error; - /* Read the chunk back */ + /* Test to use H5Dread() to read the chunk back */ if((status = H5Dread(dataset, H5T_NATIVE_INT, mem_space, dataspace, H5P_DEFAULT, check_chunk)) < 0) goto error; @@ -234,38 +343,40 @@ test_direct_chunk_write (hid_t file) if(direct_buf[i][j] != check_chunk[i][j]) { printf(" 1. 
Read different values than written."); printf(" At index %d,%d\n", i, j); - printf(" direct_buf=%d, check_chunk=%d\n", direct_buf[i][j], check_chunk[i][j]); + printf(" direct_buf=%d, check_chunk=%d\n", direct_buf[i][j], check_chunk[i][j]); goto error; } } } + /* Reinitialize different data for one chunk */ for(i = 0; i < CHUNK_NX; i++) for(j = 0; j < CHUNK_NY; j++) - direct_buf[i][j] = i + j; + direct_buf[i][j] = i + j; /* Allocate output (compressed) buffer */ outbuf = HDmalloc(z_dst_nbytes); z_dst = (Bytef *)outbuf; + /* Perform compression from the source to the destination buffer */ ret = compress2(z_dst, &z_dst_nbytes, z_src, z_src_nbytes, aggression); /* Check for various zlib errors */ if(Z_BUF_ERROR == ret) { - fprintf(stderr, "overflow"); + HDfprintf(stderr, "overflow"); goto error; } else if(Z_MEM_ERROR == ret) { - fprintf(stderr, "deflate memory error"); + HDfprintf(stderr, "deflate memory error"); goto error; } else if(Z_OK != ret) { - fprintf(stderr, "other deflate error"); + HDfprintf(stderr, "other deflate error"); goto error; } - /* Rewrite the compressed chunk data repeatedly to cover all the chunks in the - * dataset, using the direct writing function. */ + /* Rewrite the compressed chunk data repeatedly to cover all the chunks in the + * dataset, using the direct writing function. */ offset[0] = offset[1] = 0; for(i=0; i<NX/CHUNK_NX; i++) { for(j=0; j<NY/CHUNK_NY; j++) { @@ -279,7 +390,7 @@ test_direct_chunk_write (hid_t file) if(outbuf) HDfree(outbuf); - if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0) + if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0) goto error; if(H5Dclose(dataset) < 0) @@ -288,7 +399,60 @@ test_direct_chunk_write (hid_t file) if((dataset = H5Dopen2(file, DATASETNAME1, H5P_DEFAULT)) < 0) goto error; - /* Read the chunk back */ + offset[0] = CHUNK_NX; + offset[1] = CHUNK_NY; + + /* Get the size of the compressed chunk */ + ret = H5Dget_chunk_storage_size(dataset, offset, &read_chunk_nbytes); + + if(read_chunk_nbytes != (hsize_t)z_dst_nbytes) { + HDfprintf(stderr, "Read/write chunk size not the same."); + goto error; + } + + readbuf = HDmalloc(read_chunk_nbytes); + pt_readbuf = (const Bytef *)readbuf; + + /* Test to use H5DOread_chunk() to read the chunk back */ + if((status = H5DOread_chunk(dataset, H5P_DEFAULT, offset, &read_filter_mask, readbuf)) < 0) + goto error; + + if(read_filter_mask != filter_mask) { + HDfprintf(stderr, " Read/write filter mask not the same."); + goto error; + } + + /* uncompress(Bytef *dest, uLongf *destLen, const Bytef *source, uLong sourceLen) */ + ret = uncompress((Bytef *)read_dst_buf, (uLongf *)&buf_size, pt_readbuf, (uLong)read_chunk_nbytes); + + /* Check for various zlib errors */ + if(Z_BUF_ERROR == ret) { + HDfprintf(stderr, "error: not enough room in output buffer"); + goto error; + } else if(Z_MEM_ERROR == ret) { + HDfprintf(stderr, "error: not enough memory"); + goto error; + } else if(Z_OK != ret) { + HDfprintf(stderr, "error: corrupted input data"); + goto error; + } + + /* Check that the values read are the same as the values written */ + for(i = 0; i < CHUNK_NX; i++) { + for(j = 0; j < CHUNK_NY; j++) { + if(direct_buf[i][j] != read_dst_buf[i][j]) { + printf(" 1. 
Read different values than written."); + printf(" At index %d,%d\n", i, j); + printf(" direct_buf=%d, my_buf=%d\n", direct_buf[i][j], read_dst_buf[i][j]); + goto error; + } + } + } + + if(readbuf) + HDfree(readbuf); + + /* Test to use H5Dread() to read the chunk back */ if((status = H5Dread(dataset, H5T_NATIVE_INT, mem_space, dataspace, H5P_DEFAULT, check_chunk)) < 0) goto error; @@ -298,7 +462,7 @@ test_direct_chunk_write (hid_t file) if(direct_buf[i][j] != check_chunk[i][j]) { printf(" 2. Read different values than written."); printf(" At index %d,%d\n", i, j); - printf(" direct_buf=%d, check_chunk=%d\n", direct_buf[i][j], check_chunk[i][j]); + printf(" direct_buf=%d, check_chunk=%d\n", direct_buf[i][j], check_chunk[i][j]); goto error; } } @@ -312,7 +476,7 @@ test_direct_chunk_write (hid_t file) H5Sclose(dataspace); H5Pclose(cparms); H5Pclose(dxpl); - + PASSED(); return 0; @@ -327,6 +491,8 @@ error: if(outbuf) HDfree(outbuf); + if(readbuf) + HDfree(readbuf); return 1; } @@ -335,12 +501,11 @@ error: /*------------------------------------------------------------------------- * Function: test_skip_compress_write1 * - * Purpose: Test skipping compression filter when it is the only filter + * Purpose: Test skipping compression filter when it is the only filter * for the dataset * * Return: Success: 0 - * - * Failure: 1 + * Failure: 1 * * Programmer: Raymond Lu * 30 November 2012 @@ -360,6 +525,7 @@ test_skip_compress_write1(hid_t file) int i, j, n; unsigned filter_mask = 0; + unsigned read_filter_mask = 0; int direct_buf[CHUNK_NX][CHUNK_NY]; int check_chunk[CHUNK_NX][CHUNK_NY]; hsize_t offset[2] = {0, 0}; @@ -371,7 +537,7 @@ test_skip_compress_write1(hid_t file) hsize_t count[2]; /* Block count */ hsize_t block[2]; /* Block sizes */ - TESTING("skipping compression filter for H5DOwrite_chunk"); + TESTING("skipping compression filter for H5DOwrite_chunk/H5DOread_chunk"); /* * Create the data space with unlimited dimensions. @@ -408,7 +574,7 @@ test_skip_compress_write1(hid_t file) /* Initialize data for one chunk */ for(i = n = 0; i < CHUNK_NX; i++) for(j = 0; j < CHUNK_NY; j++) { - direct_buf[i][j] = n++; + direct_buf[i][j] = n++; } /* write the uncompressed chunk data repeatedly to dataset, using the direct writing function. @@ -430,6 +596,30 @@ test_skip_compress_write1(hid_t file) if((dataset = H5Dopen2(file, DATASETNAME2, H5P_DEFAULT)) < 0) goto error; + /* Use H5DOread_chunk() to read the uncompressed data */ + if((status = H5DOread_chunk(dataset, dxpl, offset, &read_filter_mask, check_chunk)) < 0) + goto error; + + if(read_filter_mask != filter_mask) { + HDfprintf(stderr, " Read/write filter mask not the same."); + goto error; + } + + /* Check that the values read are the same as the values written */ + for(i = 0; i < CHUNK_NX; i++) { + for(j = 0; j < CHUNK_NY; j++) { + if(direct_buf[i][j] != check_chunk[i][j]) { + printf(" 1. 
Read different values than written."); + printf(" At index %d,%d\n", i, j); + printf(" direct_buf=%d, check_chunk=%d\n", direct_buf[i][j], check_chunk[i][j]); + goto error; + } + } + } + + /* Clear the buffer */ + HDmemset(check_chunk, 0, buf_size); + /* * Select hyperslab for the chunk just written in the file */ @@ -440,7 +630,7 @@ test_skip_compress_write1(hid_t file) if((status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, stride, count, block)) < 0) goto error; - /* Read the chunk back */ + /* Use H5Dread() to read the chunk back */ if((status = H5Dread(dataset, H5T_NATIVE_INT, mem_space, dataspace, H5P_DEFAULT, check_chunk)) < 0) goto error; @@ -483,9 +673,9 @@ error: /*------------------------------------------------------------------------- * Function: filter_bogus1 * - * Purpose: A bogus filte that adds ADD_ON to the original value + * Purpose: A bogus filter that adds ADD_ON to the original value * - * Return: Success: Data chunk size + * Return: Success: Data chunk size * * Programmer: Raymond Lu * 30 November 2012 @@ -558,12 +748,11 @@ filter_bogus2(unsigned int flags, size_t H5_ATTR_UNUSED cd_nelmts, /*------------------------------------------------------------------------- * Function: test_skip_compress_write2 * - * Purpose: Test skipping compression filter when there are three filters + * Purpose: Test skipping compression filter when there are three filters * for the dataset * - * Return: Success: 0 - * - * Failure: 1 + * Return: Success: 0 + * Failure: 1 * * Programmer: Raymond Lu * 30 November 2012 @@ -583,6 +772,7 @@ test_skip_compress_write2(hid_t file) int i, j, n; unsigned filter_mask = 0; + unsigned read_filter_mask = 0; int origin_direct_buf[CHUNK_NX][CHUNK_NY]; int direct_buf[CHUNK_NX][CHUNK_NY]; int check_chunk[CHUNK_NX][CHUNK_NY]; @@ -618,10 +808,10 @@ test_skip_compress_write2(hid_t file) /* Register and enable first bogus filter */ if(H5Zregister (H5Z_BOGUS1) < 0) - goto error; + goto error; if(H5Pset_filter(cparms, H5Z_FILTER_BOGUS1, 0, (size_t)0, NULL) < 0) - goto error; + goto error; /* Enable compression filter */ if((status = H5Pset_deflate( cparms, (unsigned) aggression)) < 0) @@ -629,10 +819,10 @@ test_skip_compress_write2(hid_t file) /* Register and enable second bogus filter */ if(H5Zregister (H5Z_BOGUS2) < 0) - goto error; + goto error; if(H5Pset_filter(cparms, H5Z_FILTER_BOGUS2, 0, (size_t)0, NULL) < 0) - goto error; + goto error; /* * Create a new dataset within the file using cparms @@ -648,9 +838,9 @@ test_skip_compress_write2(hid_t file) /* Initialize data for one chunk. Apply operations of two bogus filters to the chunk */ for(i = n = 0; i < CHUNK_NX; i++) for(j = 0; j < CHUNK_NY; j++) { - origin_direct_buf[i][j] = n++; - direct_buf[i][j] = (origin_direct_buf[i][j] + ADD_ON) * FACTOR; - } + origin_direct_buf[i][j] = n++; + direct_buf[i][j] = (origin_direct_buf[i][j] + ADD_ON) * FACTOR; + } /* write the uncompressed chunk data repeatedly to dataset, using the direct writing function. 
* Indicate skipping the compression filter but keep the other two bogus filters */ @@ -672,6 +862,35 @@ test_skip_compress_write2(hid_t file) if((dataset = H5Dopen2(file, DATASETNAME3, H5P_DEFAULT)) < 0) goto error; + /* Use H5DOread_chunk() to read the uncompressed data */ + if((status = H5DOread_chunk(dataset, dxpl, offset, &read_filter_mask, check_chunk)) < 0) + goto error; + + if(read_filter_mask != filter_mask) { + HDfprintf(stderr, " Read/write filter mask not the same."); + goto error; + } + + /* De-compress the operations of the two bogus filters to the chunk */ + for(i = n = 0; i < CHUNK_NX; i++) + for(j = 0; j < CHUNK_NY; j++) + check_chunk[i][j] = (check_chunk[i][j]/FACTOR) - ADD_ON; + + /* Check that the values read are the same as the values written */ + for(i = 0; i < CHUNK_NX; i++) { + for(j = 0; j < CHUNK_NY; j++) { + if(origin_direct_buf[i][j] != check_chunk[i][j]) { + printf(" 1. Read different values than written."); + printf(" At index %d,%d\n", i, j); + printf(" origin_direct_buf=%d, check_chunk=%d\n", origin_direct_buf[i][j], check_chunk[i][j]); + goto error; + } + } + } + + /* Clear the buffer */ + HDmemset(check_chunk, 0, buf_size); + /* * Select hyperslab for one chunk in the file */ @@ -682,7 +901,7 @@ test_skip_compress_write2(hid_t file) if((status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, stride, count, block)) < 0) goto error; - /* Read the chunk back */ + /* Use H5Dread() to read the chunk back */ if((status = H5Dread(dataset, H5T_NATIVE_INT, mem_space, dataspace, H5P_DEFAULT, check_chunk)) < 0) goto error; @@ -725,11 +944,10 @@ error: /*------------------------------------------------------------------------- * Function: test_data_conv * - * Purpose: Test data conversion - * - * Return: Success: 0 + * Purpose: Test data conversion * - * Failure: 1 + * Return: Success: 0 + * Failure: 1 * * Programmer: Raymond Lu * 30 November 2012 @@ -740,10 +958,10 @@ static int test_data_conv(hid_t file) { typedef struct { - int a, b, c[4], d, e; + int a, b, c[4], d, e; } src_type_t; typedef struct { - int a, c[4], e; + int a, c[4], e; } dst_type_t; hid_t dataspace = -1, dataset = -1; @@ -761,6 +979,8 @@ test_data_conv(hid_t file) unsigned filter_mask = 0; src_type_t direct_buf[CHUNK_NX][CHUNK_NY]; dst_type_t check_chunk[CHUNK_NX][CHUNK_NY]; + src_type_t read_chunk[CHUNK_NX][CHUNK_NY]; /* For H5DOread_chunk */ + hsize_t offset[2] = {0, 0}; size_t buf_size = CHUNK_NX*CHUNK_NY*sizeof(src_type_t); @@ -769,7 +989,7 @@ test_data_conv(hid_t file) hsize_t count[2]; /* Block count */ hsize_t block[2]; /* Block sizes */ - TESTING("data conversion for H5DOwrite_chunk"); + TESTING("data conversion for H5DOwrite_chunk/H5DOread_chunk"); /* * Create the data space with unlimited dimensions. 
@@ -854,6 +1074,36 @@ test_data_conv(hid_t file) if((dataset = H5Dopen2(file, DATASETNAME4, H5P_DEFAULT)) < 0) goto error; + /* Use H5DOread_chunk() to read the uncompressed data */ + if((status = H5DOread_chunk(dataset, dxpl, offset, &filter_mask, read_chunk)) < 0) + goto error; + + /* Check that the values read are the same as the values written */ + for(i = 0; i < CHUNK_NX; i++) { + for(j = 0; j < CHUNK_NY; j++) { + if ((direct_buf[i][j]).a != (read_chunk[i][j]).a || + (direct_buf[i][j]).b != (read_chunk[i][j]).b || + (direct_buf[i][j]).c[0] != (read_chunk[i][j]).c[0] || + (direct_buf[i][j]).c[1] != (read_chunk[i][j]).c[1] || + (direct_buf[i][j]).c[2] != (read_chunk[i][j]).c[2] || + (direct_buf[i][j]).c[3] != (read_chunk[i][j]).c[3] || + (direct_buf[i][j]).d != (read_chunk[i][j]).d || + (direct_buf[i][j]).e != (read_chunk[i][j]).e) { + printf(" 1. Read different values than written."); + printf(" At index %d,%d\n", i, j); + printf(" src={a=%d, b=%d, c=[%d,%d,%d,%d], d=%d, e=%d\n", + (direct_buf[i][j]).a, (direct_buf[i][j]).b, (direct_buf[i][j]).c[0], (direct_buf[i][j]).c[1], + (direct_buf[i][j]).c[2], (direct_buf[i][j]).c[3], (direct_buf[i][j]).d, (direct_buf[i][j]).e); + printf(" dst={a=%d, b=%d, c=[%d,%d,%d,%d], d=%d, e=%d\n", + (read_chunk[i][j]).a, (read_chunk[i][j]).b, (read_chunk[i][j]).c[0], (read_chunk[i][j]).c[1], + (read_chunk[i][j]).c[2], (read_chunk[i][j]).c[3], (read_chunk[i][j]).d, (read_chunk[i][j]).e); + + goto error; + } + } + } + + /* * Select hyperslab for the chunk just written in the file */ @@ -879,14 +1129,14 @@ test_data_conv(hid_t file) (direct_buf[i][j]).e != (check_chunk[i][j]).e) { printf(" 1. Read different values than written."); printf(" At index %d,%d\n", i, j); - printf(" src={a=%d, b=%d, c=[%d,%d,%d,%d], d=%d, e=%d\n", + printf(" src={a=%d, b=%d, c=[%d,%d,%d,%d], d=%d, e=%d\n", (direct_buf[i][j]).a, (direct_buf[i][j]).b, (direct_buf[i][j]).c[0], (direct_buf[i][j]).c[1], (direct_buf[i][j]).c[2], (direct_buf[i][j]).c[3], (direct_buf[i][j]).d, (direct_buf[i][j]).e); - printf(" dst={a=%d, c=[%d,%d,%d,%d], e=%d\n", + printf(" dst={a=%d, c=[%d,%d,%d,%d], e=%d\n", (check_chunk[i][j]).a, (check_chunk[i][j]).c[0], (check_chunk[i][j]).c[1], (check_chunk[i][j]).c[2], (check_chunk[i][j]).c[3], (check_chunk[i][j]).e); - goto error; + goto error; } } } @@ -923,11 +1173,10 @@ error: /*------------------------------------------------------------------------- * Function: test_invalid_parameters * - * Purpose: Test invalid parameters for H5DOwrite_chunk - * - * Return: Success: 0 + * Purpose: Test invalid parameters for H5DOwrite_chunk and H5DOread_chunk * - * Failure: 1 + * Return: Success: 0 + * Failure: 1 * * Programmer: Raymond Lu * 30 November 2012 @@ -951,7 +1200,9 @@ test_invalid_parameters(hid_t file) size_t buf_size = CHUNK_NX*CHUNK_NY*sizeof(int); int aggression = 9; /* Compression aggression setting */ - TESTING("invalid parameters for H5DOwrite_chunk"); + hsize_t chunk_nbytes; /* Chunk size */ + + TESTING("invalid parameters for H5DOwrite_chunk/H5DOread_chunk"); /* * Create the data space with unlimited dimensions. 
@@ -969,7 +1220,7 @@ test_invalid_parameters(hid_t file) goto error; /* - * Create a new contiguous dataset to verify H5DOwrite_chunk doesn't work + * Create a new contiguous dataset to verify H5DOwrite_chunk/H5DOread_chunk doesn't work */ if((dataset = H5Dcreate2(file, DATASETNAME5, H5T_NATIVE_INT, dataspace, H5P_DEFAULT, cparms, H5P_DEFAULT)) < 0) @@ -981,8 +1232,8 @@ test_invalid_parameters(hid_t file) /* Initialize data for one chunk */ for(i = n = 0; i < CHUNK_NX; i++) for(j = 0; j < CHUNK_NY; j++) { - direct_buf[i][j] = n++; - } + direct_buf[i][j] = n++; + } /* Try to write the chunk data to contiguous dataset. It should fail */ offset[0] = CHUNK_NX; @@ -993,6 +1244,18 @@ test_invalid_parameters(hid_t file) goto error; } H5E_END_TRY; + /* Try to get chunk size for a contiguous dataset. It should fail */ + H5E_BEGIN_TRY { + if((status = H5Dget_chunk_storage_size(dataset, offset, &chunk_nbytes)) != FAIL) + goto error; + } H5E_END_TRY; + + /* Try to H5DOread_chunk from the contiguous dataset. It should fail */ + H5E_BEGIN_TRY { + if((status = H5DOread_chunk(dataset, dxpl, offset, &filter_mask, direct_buf)) != FAIL) + goto error; + } H5E_END_TRY; + if(H5Dclose(dataset) < 0) goto error; @@ -1011,25 +1274,40 @@ test_invalid_parameters(hid_t file) cparms, H5P_DEFAULT)) < 0) goto error; - /* Check invalid dataset ID */ + /* Check invalid dataset ID for H5DOwrite_chunk and H5DOread_chunk */ H5E_BEGIN_TRY { if((status = H5DOwrite_chunk((hid_t)-1, dxpl, filter_mask, offset, buf_size, direct_buf)) != FAIL) goto error; } H5E_END_TRY; - /* Check invalid DXPL ID */ + H5E_BEGIN_TRY { + if((status = H5DOread_chunk((hid_t)-1, dxpl, offset, &filter_mask, direct_buf)) != FAIL) + goto error; + } H5E_END_TRY; + + /* Check invalid DXPL ID for H5DOwrite_chunk and H5DOread_chunk */ H5E_BEGIN_TRY { if((status = H5DOwrite_chunk(dataset, (hid_t)-1, filter_mask, offset, buf_size, direct_buf)) != FAIL) goto error; } H5E_END_TRY; - /* Check invalid OFFSET */ + H5E_BEGIN_TRY { + if((status = H5DOread_chunk(dataset, (hid_t)-1, offset, &filter_mask, direct_buf)) != FAIL) + goto error; + } H5E_END_TRY; + + /* Check invalid OFFSET for H5DOwrite_chunk and H5DOread_chunk */ H5E_BEGIN_TRY { if((status = H5DOwrite_chunk(dataset, dxpl, filter_mask, NULL, buf_size, direct_buf)) != FAIL) goto error; } H5E_END_TRY; - /* Check when OFFSET is out of dataset range */ + H5E_BEGIN_TRY { + if((status = H5DOread_chunk(dataset, dxpl, NULL, &filter_mask, direct_buf)) != FAIL) + goto error; + } H5E_END_TRY; + + /* Check when OFFSET is out of dataset range for H5DOwrite_chunk and H5DOread_chunk */ offset[0] = NX + 1; offset[1] = NY; H5E_BEGIN_TRY { @@ -1037,7 +1315,12 @@ test_invalid_parameters(hid_t file) goto error; } H5E_END_TRY; - /* Check when OFFSET is not on chunk boundary */ + H5E_BEGIN_TRY { + if((status = H5DOread_chunk(dataset, dxpl, offset, &filter_mask, direct_buf)) != FAIL) + goto error; + } H5E_END_TRY; + + /* Check when OFFSET is not on chunk boundary for H5DOwrite_chunk and H5DOread_chunk */ offset[0] = CHUNK_NX; offset[1] = CHUNK_NY + 1; H5E_BEGIN_TRY { @@ -1045,7 +1328,12 @@ test_invalid_parameters(hid_t file) goto error; } H5E_END_TRY; - /* Check invalid buffer size */ + H5E_BEGIN_TRY { + if((status = H5DOread_chunk(dataset, dxpl, offset, &filter_mask, direct_buf)) != FAIL) + goto error; + } H5E_END_TRY; + + /* Check invalid buffer size for H5DOwrite_chunk only */ offset[0] = CHUNK_NX; offset[1] = CHUNK_NY; buf_size = 0; @@ -1054,13 +1342,18 @@ test_invalid_parameters(hid_t file) goto error; } H5E_END_TRY; - /* Check 
invalid data buffer */ + /* Check invalid data buffer for H5DOwrite_chunk and H5DOread_chunk */ buf_size = CHUNK_NX*CHUNK_NY*sizeof(int); H5E_BEGIN_TRY { if((status = H5DOwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, NULL)) != FAIL) goto error; } H5E_END_TRY; + H5E_BEGIN_TRY { + if((status = H5DOread_chunk(dataset, dxpl, offset, &filter_mask, NULL)) != FAIL) + goto error; + } H5E_END_TRY; + if(H5Dclose(dataset) < 0) goto error; @@ -1090,11 +1383,11 @@ error: /*------------------------------------------------------------------------- * Function: Main function * - * Purpose: Test direct chunk write function H5DOwrite_chunk - * - * Return: Success: 0 + * Purpose: Test direct chunk write function H5DOwrite_chunk and + * chunk direct read function H5DOread_chunk * - * Failure: 1 + * Return: Success: 0 + * Failure: 1 * * Programmer: Raymond Lu * 30 November 2012 @@ -1112,7 +1405,7 @@ int main( void ) if((file_id = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error; - /* Test direct chunk write */ + /* Test direct chunk write and direct chunk read */ #ifdef H5_HAVE_FILTER_DEFLATE nerrors += test_direct_chunk_write(file_id); #endif /* H5_HAVE_FILTER_DEFLATE */ |
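The invalid-parameter checks added at the end of the diff wrap every call that is expected to fail in H5E_BEGIN_TRY / H5E_END_TRY so that the anticipated errors do not clutter the test output. A minimal version of that pattern, with placeholder names (`expect_read_chunk_failure`, `dset`, `dxpl`, `mask`, `buf`), might look like:

```c
#include "hdf5.h"
#include "hdf5_hl.h"

/* Sketch of a negative test: H5DOread_chunk with a NULL offset must fail.
 * H5E_BEGIN_TRY / H5E_END_TRY temporarily silence the HDF5 error stack so
 * the expected failure is not printed. */
static int expect_read_chunk_failure(hid_t dset, hid_t dxpl,
                                     unsigned *mask, void *buf)
{
    herr_t status;

    H5E_BEGIN_TRY {
        status = H5DOread_chunk(dset, dxpl, NULL /* offset */, mask, buf);
    } H5E_END_TRY;

    return (status < 0) ? 0 : 1;   /* the test passes only if the call failed */
}
```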