path: root/testpar/t_filter_read.c
author Allen Byrne <byrn@hdfgroup.org> 2019-08-24 19:07:19 (GMT)
committer Allen Byrne <byrn@hdfgroup.org> 2019-08-24 19:07:33 (GMT)
commit ba974c031deb709b6d4241da95485b48d747afdd (patch)
tree b580cd124bf1e663e9b4b0db4db2690cc206f2d4 /testpar/t_filter_read.c
parent 884ad149e29f7c99487af676f0bbcd88cbf2735f (diff)
OESS-29 Update HD prefix mostly
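The HD prefix routes standard C library calls (fprintf, printf, and friends) through
HDF5-internal macros declared in H5private.h, giving the library one central override
point for portability shims and instrumentation. A minimal sketch of the convention,
assuming simple pass-through definitions; the actual macros in H5private.h may carry
extra behavior:

    /* Illustrative pass-through definitions only -- not the actual
     * H5private.h contents, which may wrap more than a bare call. */
    #include <stdio.h>

    #define HDfprintf fprintf   /* single point to swap or instrument */
    #define HDprintf  printf

    /* Call sites then use the wrapped names, as this commit does:
     *   HDfprintf(stderr, "    Read different values than written.\n");
     */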
Diffstat (limited to 'testpar/t_filter_read.c')
-rw-r--r-- testpar/t_filter_read.c | 66
1 file changed, 33 insertions(+), 33 deletions(-)
diff --git a/testpar/t_filter_read.c b/testpar/t_filter_read.c
index 4556b01..28baed5 100644
--- a/testpar/t_filter_read.c
+++ b/testpar/t_filter_read.c
@@ -37,9 +37,9 @@ static int mpi_size, mpi_rank;
#define HS_DIM1 200
#define HS_DIM2 100
-
+
/*-------------------------------------------------------------------------
- * Function: filter_read_internal
+ * Function: filter_read_internal
*
* Purpose: Tests parallel reading of a 2D dataset written serially using
* filters. During the parallel reading phase, the dataset is
@@ -54,13 +54,13 @@ static void
filter_read_internal(const char *filename, hid_t dcpl,
hsize_t *dset_size)
{
- hid_t file, dataset; /* HDF5 IDs */
- hid_t access_plist; /* Access property list ID */
- hid_t sid, memspace; /* Dataspace IDs */
- hsize_t size[2]; /* Dataspace dimensions */
- hsize_t hs_offset[2]; /* Hyperslab offset */
- hsize_t hs_size[2]; /* Hyperslab size */
- size_t i, j; /* Local index variables */
+ hid_t file, dataset; /* HDF5 IDs */
+ hid_t access_plist; /* Access property list ID */
+ hid_t sid, memspace; /* Dataspace IDs */
+ hsize_t size[2]; /* Dataspace dimensions */
+ hsize_t hs_offset[2]; /* Hyperslab offset */
+ hsize_t hs_size[2]; /* Hyperslab size */
+ size_t i, j; /* Local index variables */
char name[32] = "dataset";
herr_t hrc; /* Error status */
int *points = NULL; /* Writing buffer for entire dataset */
@@ -151,17 +151,17 @@ filter_read_internal(const char *filename, hid_t dcpl,
for (j=0; j<hs_size[1]; j++) {
if(points[i*size[1]+(size_t)hs_offset[1]+j] !=
check[i*hs_size[1]+j]) {
- fprintf(stderr," Read different values than written.\n");
- fprintf(stderr," At index %lu,%lu\n",
- (unsigned long)(i),
- (unsigned long)(hs_offset[1]+j));
- fprintf(stderr," At original: %d\n",
- (int)points[i*size[1]+(size_t)hs_offset[1]+j]);
- fprintf(stderr," At returned: %d\n",
- (int)check[i*hs_size[1]+j]);
+ HDfprintf(stderr," Read different values than written.\n");
+ HDfprintf(stderr," At index %lu,%lu\n",
+ (unsigned long)(i),
+ (unsigned long)(hs_offset[1]+j));
+ HDfprintf(stderr," At original: %d\n",
+ (int)points[i*size[1]+(size_t)hs_offset[1]+j]);
+ HDfprintf(stderr," At returned: %d\n",
+ (int)check[i*hs_size[1]+j]);
VRFY(FALSE, "");
- }
- }
+ }
+ }
}
/* Get the storage size of the dataset */
@@ -194,10 +194,10 @@ filter_read_internal(const char *filename, hid_t dcpl,
/*-------------------------------------------------------------------------
* Function: test_filter_read
*
- * Purpose: Tests parallel reading of datasets written serially using
+ * Purpose: Tests parallel reading of datasets written serially using
* several (combinations of) filters.
*
- * Programmer: Christian Chilan
+ * Programmer: Christian Chilan
* Tuesday, May 15, 2007
*
* Modifications:
@@ -208,7 +208,7 @@ filter_read_internal(const char *filename, hid_t dcpl,
void
test_filter_read(void)
{
- hid_t dc; /* HDF5 IDs */
+ hid_t dc; /* HDF5 IDs */
const hsize_t chunk_size[2] = {CHUNK_DIM1, CHUNK_DIM2}; /* Chunk dimensions */
hsize_t null_size; /* Size of dataset without filters */
unsigned chunk_opts; /* Chunk options */
@@ -236,7 +236,7 @@ test_filter_read(void)
filename = GetTestParameters();
if(VERBOSE_MED)
- printf("Parallel reading of dataset written with filters %s\n", filename);
+ HDprintf("Parallel reading of dataset written with filters %s\n", filename);
/*----------------------------------------------------------
* STEP 0: Test without filters.
@@ -448,10 +448,10 @@ test_filter_read(void)
/* Make sure encoding is enabled */
if(h5_szip_can_encode() == 1) {
- hrc = H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block);
+ hrc = H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block);
VRFY(hrc>=0, "H5Pset_szip");
- filter_read_internal(filename,dc,&combo_size);
+ filter_read_internal(filename,dc,&combo_size);
}
/* Clean up objects used for this test */
@@ -461,25 +461,25 @@ test_filter_read(void)
/* Testing shuffle+szip(with encoder)+checksum filters(checksum last) */
/* Make sure encoding is enabled */
if(h5_szip_can_encode() == 1) {
- dc = H5Pcreate(H5P_DATASET_CREATE);
+ dc = H5Pcreate(H5P_DATASET_CREATE);
VRFY(dc>=0, "H5Pcreate");
- hrc = H5Pset_chunk (dc, 2, chunk_size);
+ hrc = H5Pset_chunk (dc, 2, chunk_size);
VRFY(hrc>=0, "H5Pset_chunk");
- hrc = H5Pset_shuffle (dc);
+ hrc = H5Pset_shuffle (dc);
VRFY(hrc>=0, "H5Pset_shuffle");
- hrc = H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block);
+ hrc = H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block);
VRFY(hrc>=0, "H5Pset_szip");
- hrc = H5Pset_fletcher32 (dc);
+ hrc = H5Pset_fletcher32 (dc);
VRFY(hrc>=0, "H5Pset_fletcher32");
- filter_read_internal(filename,dc,&combo_size);
+ filter_read_internal(filename,dc,&combo_size);
- /* Clean up objects used for this test */
- hrc = H5Pclose (dc);
+ /* Clean up objects used for this test */
+ hrc = H5Pclose (dc);
VRFY(hrc>=0, "H5Pclose");
}
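
For context on the hunks above: the parallel phase of filter_read_internal has each
MPI rank select and read its own band of columns from the serially written dataset,
then compare the result against the original write buffer. A minimal standalone
sketch of that hyperslab pattern, with a hypothetical entry point and no error
checking (the real test wraps every call in VRFY); it assumes the column count
divides evenly by the rank count:

    /* Hedged sketch of the per-rank hyperslab read; read_my_slab is a
     * hypothetical helper, not part of t_filter_read.c. */
    #include <stdlib.h>
    #include <mpi.h>
    #include <hdf5.h>

    static void
    read_my_slab(const char *filename)
    {
        int mpi_rank, mpi_size;

        MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
        MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);

        /* Open the file for parallel access through the MPI-IO driver */
        hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
        H5Pset_fapl_mpio(fapl, MPI_COMM_WORLD, MPI_INFO_NULL);
        hid_t file = H5Fopen(filename, H5F_ACC_RDONLY, fapl);

        hid_t dset   = H5Dopen2(file, "dataset", H5P_DEFAULT);
        hid_t fspace = H5Dget_space(dset);

        hsize_t size[2];
        H5Sget_simple_extent_dims(fspace, size, NULL);

        /* Each rank takes a contiguous band of columns */
        hsize_t hs_size[2]   = { size[0], size[1] / (hsize_t)mpi_size };
        hsize_t hs_offset[2] = { 0, (hsize_t)mpi_rank * hs_size[1] };
        H5Sselect_hyperslab(fspace, H5S_SELECT_SET, hs_offset, NULL,
                            hs_size, NULL);

        /* Matching memory dataspace, then the collective-capable read */
        hid_t mspace = H5Screate_simple(2, hs_size, NULL);
        int  *check  = malloc((size_t)(hs_size[0] * hs_size[1]) * sizeof(int));
        H5Dread(dset, H5T_NATIVE_INT, mspace, fspace, H5P_DEFAULT, check);

        free(check);
        H5Sclose(mspace);
        H5Sclose(fspace);
        H5Dclose(dset);
        H5Fclose(file);
        H5Pclose(fapl);
    }

Because the file was written serially with the filters applied, each rank exercises
the decode side of the filter pipeline independently on its own chunks.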