author    | Allen Byrne <byrn@hdfgroup.org> | 2020-05-12 21:14:30 (GMT)
committer | Allen Byrne <byrn@hdfgroup.org> | 2020-05-12 21:14:30 (GMT)
commit    | 9f1d06c7f150d3ad34e24dc0055f493d2210fe91 (patch)
tree      | dad2d9093c513674c8d4f71f03bc767cf83453a2 /tools
parent    | 8fc4f67be4d66018b4dca4b352672a47e4717f44 (diff)
Whitespace cleanup
Diffstat (limited to 'tools')
-rw-r--r-- | tools/lib/h5trav.h                            |   4
-rw-r--r-- | tools/src/Makefile.am                         |   2
-rw-r--r-- | tools/src/h5diff/h5diff_common.c              |  42
-rw-r--r-- | tools/src/h5dump/h5dump.c                     |   4
-rw-r--r-- | tools/src/h5dump/h5dump.h                     |   1
-rw-r--r-- | tools/src/h5dump/h5dump_xml.c                 |   2
-rw-r--r-- | tools/src/h5format_convert/h5format_convert.c | 104
-rw-r--r-- | tools/src/h5jam/h5jam.c                       |   2
-rw-r--r-- | tools/test/h5format_convert/h5fc_chk_idx.c    |   6
-rw-r--r-- | tools/test/h5repack/Makefile.am               |   2
-rw-r--r-- | tools/test/misc/vds/UC_1.h                    |   2
-rw-r--r-- | tools/test/misc/vds/UC_1_one_dim_gen.c        |   4
-rw-r--r-- | tools/test/misc/vds/UC_2.h                    |   4
-rw-r--r-- | tools/test/misc/vds/UC_2_two_dims_gen.c       |   2
-rw-r--r-- | tools/test/perform/chunk_cache.c              |  98
-rw-r--r-- | tools/test/perform/direct_write_perf.c        |  86
-rw-r--r-- | tools/test/perform/overhead.c                 |   6
-rw-r--r-- | tools/test/perform/pio_perf.c                 |   2
-rw-r--r-- | tools/test/perform/sio_engine.c               |   2
19 files changed, 187 insertions, 188 deletions
diff --git a/tools/lib/h5trav.h b/tools/lib/h5trav.h index 88473ad..affdf99 100644 --- a/tools/lib/h5trav.h +++ b/tools/lib/h5trav.h @@ -133,8 +133,8 @@ extern "C" { *------------------------------------------------------------------------- */ H5TOOLS_DLL void h5trav_set_index(H5_index_t print_index_by, H5_iter_order_t print_index_order); -H5TOOLS_DLL int h5trav_visit(hid_t file_id, const char *grp_name, - hbool_t visit_start, hbool_t recurse, h5trav_obj_func_t visit_obj, +H5TOOLS_DLL int h5trav_visit(hid_t file_id, const char *grp_name, + hbool_t visit_start, hbool_t recurse, h5trav_obj_func_t visit_obj, h5trav_lnk_func_t visit_lnk, void *udata, unsigned fields); H5TOOLS_DLL herr_t symlink_visit_add(symlink_trav_t *visited, H5L_type_t type, const char *file, const char *path); H5TOOLS_DLL hbool_t symlink_is_visited(symlink_trav_t *visited, H5L_type_t type, const char *file, const char *path); diff --git a/tools/src/Makefile.am b/tools/src/Makefile.am index beceee5..93fcc1e 100644 --- a/tools/src/Makefile.am +++ b/tools/src/Makefile.am @@ -23,6 +23,6 @@ CONFIG=ordered # All subdirectories SUBDIRS=h5diff h5ls h5dump misc h5import h5repack h5jam h5copy \ - h5format_convert h5stat + h5format_convert h5stat include $(top_srcdir)/config/conclude.am diff --git a/tools/src/h5diff/h5diff_common.c b/tools/src/h5diff/h5diff_common.c index e05a8e3..3b0fe1c 100644 --- a/tools/src/h5diff/h5diff_common.c +++ b/tools/src/h5diff/h5diff_common.c @@ -27,20 +27,20 @@ static int check_d_input(const char*); */ static const char *s_opts = "hVrv:qn:d:p:NcelxE:S"; static struct long_options l_opts[] = { - { "help", no_arg, 'h' }, - { "version", no_arg, 'V' }, - { "report", no_arg, 'r' }, - { "verbose", optional_arg, 'v' }, - { "quiet", no_arg, 'q' }, - { "count", require_arg, 'n' }, - { "delta", require_arg, 'd' }, - { "relative", require_arg, 'p' }, - { "nan", no_arg, 'N' }, - { "compare", no_arg, 'c' }, + { "help", no_arg, 'h' }, + { "version", no_arg, 'V' }, + { "report", no_arg, 'r' }, + { "verbose", optional_arg, 'v' }, + { "quiet", no_arg, 'q' }, + { "count", require_arg, 'n' }, + { "delta", require_arg, 'd' }, + { "relative", require_arg, 'p' }, + { "nan", no_arg, 'N' }, + { "compare", no_arg, 'c' }, { "use-system-epsilon", no_arg, 'e' }, - { "follow-symlinks", no_arg, 'l' }, - { "no-dangling-links", no_arg, 'x' }, - { "exclude-path", require_arg, 'E' }, + { "follow-symlinks", no_arg, 'l' }, + { "no-dangling-links", no_arg, 'x' }, + { "exclude-path", require_arg, 'E' }, { "enable-error-stack", no_arg, 'S' }, { NULL, 0, '\0' } }; @@ -78,13 +78,7 @@ static void check_options(diff_opt_t* opts) *------------------------------------------------------------------------- */ -void parse_command_line(int argc, - const char* argv[], - const char** fname1, - const char** fname2, - const char** objname1, - const char** objname2, - diff_opt_t* opts) +void parse_command_line(int argc, const char* argv[], const char** fname1, const char** fname2, const char** objname1, const char** objname2, diff_opt_t* opts) { int i; int opt; @@ -201,7 +195,7 @@ void parse_command_line(int argc, } else { while(NULL != exclude_prev->next) - exclude_prev=exclude_prev->next; + exclude_prev = exclude_prev->next; exclude_node->next = NULL; exclude_prev->next = exclude_node; @@ -209,7 +203,7 @@ void parse_command_line(int argc, break; case 'd': - opts->d=1; + opts->d = 1; if (check_d_input(opt_arg) == - 1) { HDprintf("<-d %s> is not a valid option\n", opt_arg); @@ -224,7 +218,7 @@ void parse_command_line(int argc, break; case 'p': - 
opts->p=1; + opts->p = 1; if (check_p_input(opt_arg) == -1) { HDprintf("<-p %s> is not a valid option\n", opt_arg); usage(); @@ -238,7 +232,7 @@ void parse_command_line(int argc, break; case 'n': - opts->n=1; + opts->n = 1; if ( check_n_input(opt_arg) == -1) { HDprintf("<-n %s> is not a valid option\n", opt_arg); usage(); diff --git a/tools/src/h5dump/h5dump.c b/tools/src/h5dump/h5dump.c index d97fdc0..d9ea4b0 100644 --- a/tools/src/h5dump/h5dump.c +++ b/tools/src/h5dump/h5dump.c @@ -861,14 +861,14 @@ parse_command_line(int argc, const char *argv[]) int opt; int last_was_dset = FALSE; - /* no arguments */ + /* no arguments */ if (argc == 1) { usage(h5tools_getprogname()); goto error; } /* this will be plenty big enough to hold the info */ - if((hand = (struct handler_t *)HDcalloc((size_t)argc, sizeof(struct handler_t)))==NULL) { + if((hand = (struct handler_t *)HDcalloc((size_t)argc, sizeof(struct handler_t))) == NULL) { goto error; } diff --git a/tools/src/h5dump/h5dump.h b/tools/src/h5dump/h5dump.h index 801f60d..ca1fef6 100644 --- a/tools/src/h5dump/h5dump.h +++ b/tools/src/h5dump/h5dump.h @@ -43,6 +43,7 @@ typedef struct h5dump_table_items_t { table_t *dset_table; /* Table of datasets */ table_t *type_table; /* Table of datatypes */ } h5dump_table_items_t; + typedef struct h5dump_table_list_t { size_t nalloc; size_t nused; diff --git a/tools/src/h5dump/h5dump_xml.c b/tools/src/h5dump/h5dump_xml.c index d9f1f67..1a7c3ce 100644 --- a/tools/src/h5dump/h5dump_xml.c +++ b/tools/src/h5dump/h5dump_xml.c @@ -732,7 +732,7 @@ xml_escape_the_name(const char *str) * Programmer: REMcG *------------------------------------------------------------------------- */ -static char * +static char * xml_escape_the_string(const char *str, int slen) { size_t extra; diff --git a/tools/src/h5format_convert/h5format_convert.c b/tools/src/h5format_convert/h5format_convert.c index bb606ac..8e0d857 100644 --- a/tools/src/h5format_convert/h5format_convert.c +++ b/tools/src/h5format_convert/h5format_convert.c @@ -18,7 +18,7 @@ /* * We include the private header file so we can get to the uniform - * programming environment it declares. + * programming environment it declares. 
* HDF5 API functions (except for H5G_basename()) */ #include "H5private.h" @@ -42,13 +42,13 @@ static int verbose_g = 0; static const char *s_opts = "hVvd:n"; static struct long_options l_opts[] = { { "help", no_arg, 'h' }, - { "hel", no_arg, 'h'}, - { "he", no_arg, 'h'}, + { "hel", no_arg, 'h'}, + { "he", no_arg, 'h'}, { "version", no_arg, 'V' }, - { "version", no_arg, 'V' }, - { "versio", no_arg, 'V' }, - { "versi", no_arg, 'V' }, - { "vers", no_arg, 'V' }, + { "version", no_arg, 'V' }, + { "versio", no_arg, 'V' }, + { "versi", no_arg, 'V' }, + { "vers", no_arg, 'V' }, { "verbose", no_arg, 'v' }, { "verbos", no_arg, 'v' }, { "verbo", no_arg, 'v' }, @@ -73,7 +73,7 @@ static struct long_options l_opts[] = { * *------------------------------------------------------------------------- */ -static void usage(const char *prog) +static void usage(const char *prog) { HDfprintf(stdout, "usage: %s [OPTIONS] file_name\n", prog); HDfprintf(stdout, " OPTIONS\n"); @@ -113,62 +113,66 @@ static void usage(const char *prog) *------------------------------------------------------------------------- */ static int -parse_command_line(int argc, const char **argv) +parse_command_line(int argc, const char **argv) { int opt; /* no arguments */ if (argc == 1) { usage(h5tools_getprogname()); - h5tools_setstatus(EXIT_FAILURE); + h5tools_setstatus(EXIT_FAILURE); goto error; } /* parse command line options */ while ((opt = get_option(argc, argv, s_opts, l_opts)) != EOF) { - switch((char) opt) { - case 'h': - usage(h5tools_getprogname()); - h5tools_setstatus(EXIT_SUCCESS); - goto error; - - case 'V': - print_version(h5tools_getprogname()); - h5tools_setstatus(EXIT_SUCCESS); - goto error; - - case 'v': - verbose_g = TRUE; - break; - - case 'd': /* -d dname */ - if(opt_arg != NULL && *opt_arg) - dname_g = HDstrdup(opt_arg); - if(dname_g == NULL) { - h5tools_setstatus(EXIT_FAILURE); - error_msg("No dataset name\n", opt_arg); - usage(h5tools_getprogname()); - goto error; - } - dset_g = TRUE; - break; - - case 'n': /* -n */ - noop_g = TRUE; - break; - - default: - h5tools_setstatus(EXIT_FAILURE); - usage(h5tools_getprogname()); - goto error; - break; - } /* switch */ + switch((char) opt) { + case 'h': + usage(h5tools_getprogname()); + h5tools_setstatus(EXIT_SUCCESS); + goto error; + + case 'V': + print_version(h5tools_getprogname()); + h5tools_setstatus(EXIT_SUCCESS); + goto error; + + case 'v': + verbose_g = TRUE; + break; + + case 'd': /* -d dname */ + if(opt_arg != NULL && *opt_arg) + dname_g = HDstrdup(opt_arg); + if(dname_g == NULL) { + h5tools_setstatus(EXIT_FAILURE); + error_msg("No dataset name\n", opt_arg); + usage(h5tools_getprogname()); + goto error; + } + dset_g = TRUE; + break; + + case 'n': /* -n */ + noop_g = TRUE; + break; + + case 'E': + enable_error_stack = 1; + break; + + default: + h5tools_setstatus(EXIT_FAILURE); + usage(h5tools_getprogname()); + goto error; + break; + } /* switch */ } /* while */ if (argc <= opt_ind) { error_msg("missing file name\n"); usage(h5tools_getprogname()); - h5tools_setstatus(EXIT_FAILURE); + h5tools_setstatus(EXIT_FAILURE); goto error; } @@ -327,7 +331,7 @@ done: } else if(verbose_g) HDfprintf(stdout, "Close the dataset\n"); - + /* Close the dataset creation property list */ if(H5Pclose(dcpl) < 0) { error_msg("unable to close dataset creation property list\n"); @@ -470,7 +474,7 @@ done: HDfree(fname_g); if(dname_g) HDfree(dname_g); - + H5Eset_auto2(H5E_DEFAULT, func, edata); leave(h5tools_getstatus()); diff --git a/tools/src/h5jam/h5jam.c b/tools/src/h5jam/h5jam.c index 
07797c8..009e527 100644 --- a/tools/src/h5jam/h5jam.c +++ b/tools/src/h5jam/h5jam.c @@ -148,7 +148,7 @@ parse_command_line (int argc, const char *argv[]) int opt = FALSE; /* parse command line options */ - while ((opt = get_option (argc, argv, s_opts, l_opts)) != EOF) + while ((opt = get_option(argc, argv, s_opts, l_opts)) != EOF) { switch ((char) opt) { diff --git a/tools/test/h5format_convert/h5fc_chk_idx.c b/tools/test/h5format_convert/h5fc_chk_idx.c index ad1742b..570e3a1 100644 --- a/tools/test/h5format_convert/h5fc_chk_idx.c +++ b/tools/test/h5format_convert/h5fc_chk_idx.c @@ -13,7 +13,7 @@ /* * A program to verify that the chunk indexing type of a dataset in a file - * is version 1 B-tree. + * is version 1 B-tree. * This is to support the testing of the tool "h5format_convert". */ @@ -32,7 +32,7 @@ usage(void) /*------------------------------------------------------------------------- * Function: main * - * Purpose: To check that the chunk indexing type for the dataset in + * Purpose: To check that the chunk indexing type for the dataset in * the file is version 1 B-tree. * * Return: 0 -- the indexing type is version 1 B-tree @@ -90,7 +90,7 @@ main(int argc, char *argv[]) } /* end if */ /* Return success when the chunk indexing type is version 1 B-tree */ - if(idx_type == H5D_CHUNK_IDX_BTREE) + if(idx_type == H5D_CHUNK_IDX_BTREE) HDexit(EXIT_SUCCESS); else { HDfprintf(stderr, "Error: chunk indexing type is %d\n", idx_type); diff --git a/tools/test/h5repack/Makefile.am b/tools/test/h5repack/Makefile.am index 38f7b2f..29906a1 100644 --- a/tools/test/h5repack/Makefile.am +++ b/tools/test/h5repack/Makefile.am @@ -42,7 +42,7 @@ LDADD=../../src/h5repack/libh5repack.la $(LIBH5TOOLS) $(LIBH5TEST) $(LIBHDF5) testh5repack_detect_szip_SOURCES=testh5repack_detect_szip.c -h5repacktst_SOURCES=h5repacktst.c +h5repacktst_SOURCES=h5repacktst.c if HAVE_SHARED_CONDITIONAL diff --git a/tools/test/misc/vds/UC_1.h b/tools/test/misc/vds/UC_1.h index 9d1f758..d922d22 100644 --- a/tools/test/misc/vds/UC_1.h +++ b/tools/test/misc/vds/UC_1.h @@ -49,7 +49,7 @@ | | +-------M-------+ - + dim[0] / diff --git a/tools/test/misc/vds/UC_1_one_dim_gen.c b/tools/test/misc/vds/UC_1_one_dim_gen.c index b5ddae3..7c4201e 100644 --- a/tools/test/misc/vds/UC_1_one_dim_gen.c +++ b/tools/test/misc/vds/UC_1_one_dim_gen.c @@ -44,7 +44,7 @@ static char UC_1_VDS_FILE_NAME[NAME_LEN] = "1_vds.h5"; /* Dataset names */ static char UC_1_SOURCE_DSET_NAME[NAME_LEN] = "source_dset"; static char UC_1_VDS_DSET_NAME[NAME_LEN] = "vds_dset"; - + /* Fill values */ static int UC_1_FILL_VALUES[UC_1_N_SOURCES] = { -1, @@ -162,7 +162,7 @@ main(void) value = ((i + 1) * 10) + j; for(k = 0; k < count; k++) - buffer[k] = value; + buffer[k] = value; start[0] = (hsize_t)j; start[1] = 0; diff --git a/tools/test/misc/vds/UC_2.h b/tools/test/misc/vds/UC_2.h index a3ee0f7..07f9b9a 100644 --- a/tools/test/misc/vds/UC_2.h +++ b/tools/test/misc/vds/UC_2.h @@ -45,7 +45,7 @@ | | dim[1] - + */ #define UC_2_N_SOURCES 5 @@ -98,7 +98,7 @@ static char UC_2_FILE_NAMES[UC_2_N_SOURCES][NAME_LEN] = { /* VDS file name */ #define UC_2_VDS_FILE_NAME "2_vds.h5" - + /* Dataset names */ #define UC_2_SOURCE_DSET_NAME "source_dset" #define UC_2_SOURCE_DSET_PATH "/source_dset" diff --git a/tools/test/misc/vds/UC_2_two_dims_gen.c b/tools/test/misc/vds/UC_2_two_dims_gen.c index 8e1554b..b9799d6 100644 --- a/tools/test/misc/vds/UC_2_two_dims_gen.c +++ b/tools/test/misc/vds/UC_2_two_dims_gen.c @@ -168,7 +168,7 @@ main(void) value = ((i + 1) * 10) + j; for(k = 0; k < count; k++) - 
buffer[k] = value; + buffer[k] = value; start[0] = (hsize_t)j; start[1] = 0; diff --git a/tools/test/perform/chunk_cache.c b/tools/test/perform/chunk_cache.c index 5557558..e594e34 100644 --- a/tools/test/perform/chunk_cache.c +++ b/tools/test/perform/chunk_cache.c @@ -112,51 +112,51 @@ static int create_dset1(hid_t file) hsize_t chunk_dims[RANK] = {CHUNK1_DIM1, CHUNK1_DIM2}; int data[DSET1_DIM1][DSET1_DIM2]; /* data for writing */ int i, j; - + /* Create the data space. */ if((dataspace = H5Screate_simple (RANK, dims, NULL)) < 0) goto error; - + /* Modify dataset creation properties, i.e. enable chunking */ if((dcpl = H5Pcreate (H5P_DATASET_CREATE)) < 0) goto error; if(H5Pset_chunk (dcpl, RANK, chunk_dims) < 0) goto error; - + /* Set the dummy filter simply for counting the number of bytes being read into the memory */ if(H5Zregister(H5Z_COUNTER) < 0) goto error; - + if(H5Pset_filter(dcpl, FILTER_COUNTER, 0, 0, NULL) < 0) goto error; - + /* Create a new dataset within the file using chunk creation properties. */ if((dataset = H5Dcreate2 (file, DSET1_NAME, H5T_NATIVE_INT, dataspace, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) goto error; - + for (i = 0; i < DSET1_DIM1; i++) for (j = 0; j < DSET1_DIM2; j++) data[i][j] = i+j; - + /* Write data to dataset */ if(H5Dwrite (dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data) < 0) goto error; - + /* Close resources */ H5Dclose (dataset); H5Pclose (dcpl); H5Sclose (dataspace); return 0; - + error: H5E_BEGIN_TRY { H5Dclose (dataset); H5Pclose (dcpl); H5Sclose (dataspace); } H5E_END_TRY; - + return 1; } @@ -173,51 +173,51 @@ static int create_dset2(hid_t file) hsize_t chunk_dims[RANK] = {CHUNK2_DIM1, CHUNK2_DIM2}; int data[DSET2_DIM1][DSET2_DIM2]; /* data for writing */ int i, j; - + /* Create the data space. */ if((dataspace = H5Screate_simple (RANK, dims, NULL)) < 0) goto error; - + /* Modify dataset creation properties, i.e. enable chunking */ if((dcpl = H5Pcreate (H5P_DATASET_CREATE)) < 0) goto error; if(H5Pset_chunk (dcpl, RANK, chunk_dims) < 0) goto error; - + /* Set the dummy filter simply for counting the number of bytes being read into the memory */ if(H5Zregister(H5Z_COUNTER) < 0) goto error; if(H5Pset_filter(dcpl, FILTER_COUNTER, 0, 0, NULL) < 0) goto error; - + /* Create a new dataset within the file using chunk creation properties. 
*/ if((dataset = H5Dcreate2 (file, DSET2_NAME, H5T_NATIVE_INT, dataspace, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) goto error; - + for (i = 0; i < DSET2_DIM1; i++) for (j = 0; j < DSET2_DIM2; j++) data[i][j] = i+j; - + /* Write data to dataset */ if(H5Dwrite (dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data) < 0) goto error; - + /* Close resources */ H5Dclose (dataset); H5Pclose (dcpl); H5Sclose (dataspace); - + return 0; - + error: H5E_BEGIN_TRY { H5Dclose (dataset); H5Pclose (dcpl); H5Sclose (dataspace); } H5E_END_TRY; - + return 1; } /*--------------------------------------------------------------------------- @@ -230,56 +230,56 @@ static int check_partial_chunks_perf(hid_t file) hid_t filespace = H5I_INVALID_HID; hid_t memspace = H5I_INVALID_HID; hid_t dapl = H5I_INVALID_HID; - + int rdata[DSET1_DIM2]; /* data for reading */ int i; - + hsize_t row_rank = 1; hsize_t row_dim[1] = {DSET1_DIM2}; hsize_t start[RANK] = {0, 0}; hsize_t count[RANK] = {1, DSET1_DIM2}; double start_t, end_t; - + if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0) goto error; if(H5Pset_chunk_cache (dapl, RDCC_NSLOTS, RDCC_NBYTES, RDCC_W0) < 0) goto error; dataset = H5Dopen2 (file, DSET1_NAME, dapl); - + H5_CHECK_OVERFLOW(row_rank, hsize_t, int); memspace = H5Screate_simple((int)row_rank, row_dim, NULL); filespace = H5Dget_space(dataset); - + nbytes_global = 0; - + start_t = retrieve_time(); - + /* Read the data row by row */ for(i = 0; i < DSET1_DIM1; i++) { start[0] = (hsize_t)i; if(H5Sselect_hyperslab(filespace, H5S_SELECT_SET, start, NULL, count, NULL) < 0) goto error; - + if(H5Dread (dataset, H5T_NATIVE_INT, memspace, filespace, H5P_DEFAULT, rdata) < 0) goto error; } - + end_t = retrieve_time(); - + #ifdef H5_HAVE_GETTIMEOFDAY printf("1. Partial chunks: total read time is %lf; number of bytes being read from file is %lu\n", (end_t -start_t), nbytes_global); #else printf("1. Partial chunks: no total read time because gettimeofday() is not available; number of bytes being read from file is %lu\n", nbytes_global); #endif - + H5Dclose (dataset); H5Sclose (filespace); H5Sclose (memspace); H5Pclose (dapl); - + return 0; error: H5E_BEGIN_TRY { @@ -302,21 +302,21 @@ static int check_hash_value_perf(hid_t file) hid_t filespace = H5I_INVALID_HID; hid_t memspace = H5I_INVALID_HID; hid_t dapl = H5I_INVALID_HID; - + int rdata[DSET2_DIM1]; /* data for reading */ int i; - + hsize_t column_rank = 1; hsize_t column_dim[1] = {DSET2_DIM1}; hsize_t start[RANK] = {0, 0}; hsize_t count[RANK] = {DSET2_DIM1, 1}; double start_t, end_t; - + if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0) goto error; if(H5Pset_chunk_cache (dapl, RDCC_NSLOTS, RDCC_NBYTES, RDCC_W0) < 0) goto error; - + if((dataset = H5Dopen2 (file, DSET2_NAME, dapl)) < 0) goto error; @@ -325,37 +325,37 @@ static int check_hash_value_perf(hid_t file) goto error; if((filespace = H5Dget_space(dataset)) < 0) goto error; - + nbytes_global = 0; - + start_t = retrieve_time(); - + /* Read the data column by column */ for(i = 0; i < DSET2_DIM2; i++) { start[1] = (hsize_t)i; if(H5Sselect_hyperslab(filespace, H5S_SELECT_SET, start, NULL, count, NULL) < 0) goto error; - + if(H5Dread (dataset, H5T_NATIVE_INT, memspace, filespace, H5P_DEFAULT, rdata) < 0) goto error; } - + end_t = retrieve_time(); - + #ifdef H5_HAVE_GETTIMEOFDAY printf("2. Hash value: total read time is %lf; number of bytes being read from file is %lu\n", (end_t -start_t), nbytes_global); #else printf("2. 
Hash value: no total read time because gettimeofday() is not available; number of bytes being read from file is %lu\n", nbytes_global); #endif - + H5Dclose (dataset); H5Sclose (filespace); H5Sclose (memspace); H5Pclose (dapl); return 0; - + error: H5E_BEGIN_TRY { H5Dclose (dataset); @@ -377,14 +377,14 @@ main (void) { hid_t file; /* handles */ int nerrors = 0; - + /* Create a new file. If file exists its contents will be overwritten. */ if((file = H5Fcreate (FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error; nerrors += create_dset1(file); nerrors += create_dset2(file); - + if(H5Fclose (file) < 0) goto error; @@ -394,14 +394,14 @@ main (void) nerrors += check_partial_chunks_perf(file); nerrors += check_hash_value_perf(file); - + if(H5Fclose (file) < 0) goto error; - + if (nerrors>0) goto error; cleanup(); return 0; - + error: fprintf(stderr, "*** ERRORS DETECTED ***\n"); return 1; diff --git a/tools/test/perform/direct_write_perf.c b/tools/test/perform/direct_write_perf.c index f13cd24..20b7237 100644 --- a/tools/test/perform/direct_write_perf.c +++ b/tools/test/perform/direct_write_perf.c @@ -75,7 +75,7 @@ const char *FILENAME[] = { #define NX 100 #define NY 1000 #define NZ 250 -#define CHUNK_NX 1 +#define CHUNK_NX 1 #define CHUNK_NY 1000 #define CHUNK_NZ 250 @@ -108,7 +108,7 @@ void reportTime(struct timeval start, double mbytes) } /* end if */ /*printf("mbytes=%lf, sec=%lf, usec=%lf\n", mbytes, (double)timeval_diff.tv_sec, (double)timeval_diff.tv_usec);*/ - printf("MBytes/second: %lf\n", (double)mbytes/((double)timeval_diff.tv_sec+((double)timeval_diff.tv_usec/(double)1000000.0))); + printf("MBytes/second: %lf\n", (double)mbytes/((double)timeval_diff.tv_sec+((double)timeval_diff.tv_usec/(double)1000000.0))); } /*-------------------------------------------------- @@ -121,7 +121,7 @@ int create_file(hid_t fapl_id) hid_t fapl; hid_t cparms; hid_t dataspace, dataset; - hsize_t dims[RANK] = {NX, NY, NZ}; + hsize_t dims[RANK] = {NX, NY, NZ}; hsize_t chunk_dims[RANK] ={CHUNK_NX, CHUNK_NY, CHUNK_NZ}; unsigned int aggression = 9; /* Compression aggression setting */ int ret; @@ -198,7 +198,7 @@ int create_file(hid_t fapl_id) if(H5Dclose(dataset) < 0) TEST_ERROR; - if(H5Fclose(file) < 0) + if(H5Fclose(file) < 0) TEST_ERROR; if(H5Sclose(dataspace) < 0) @@ -223,7 +223,7 @@ int create_file(hid_t fapl_id) /* Initialize data for chunks */ for(i = 0; i < NX; i++) { p = direct_buf[i] = (unsigned int*)malloc(CHUNK_NY*CHUNK_NZ*sizeof(unsigned int)); - + for(j=0; j < CHUNK_NY*CHUNK_NZ; j++, p++) *p = rand() % 65000; @@ -267,7 +267,7 @@ error: } /*-------------------------------------------------- - * Benchmark the performance of the new function + * Benchmark the performance of the new function * with precompressed data. *-------------------------------------------------- */ @@ -283,8 +283,8 @@ test_direct_write_uncompressed_data(hid_t fapl_id) unsigned filter_mask = 0; hsize_t offset[RANK] = {0, 0, 0}; - struct timeval timeval_start; - + struct timeval timeval_start; + TESTING("H5Dwrite_chunk for uncompressed data"); if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0) @@ -301,8 +301,8 @@ test_direct_write_uncompressed_data(hid_t fapl_id) TEST_ERROR; - /* Write the compressed chunk data repeatedly to cover all the chunks in the - * dataset, using the direct writing function. */ + /* Write the compressed chunk data repeatedly to cover all the chunks in the + * dataset, using the direct writing function. 
*/ for(i=0; i<NX; i++) { status = H5Dwrite_chunk(dataset, dxpl, filter_mask, offset, CHUNK_NY*CHUNK_NZ*sizeof(unsigned int), direct_buf[i]); (offset[0])++; @@ -315,8 +315,8 @@ test_direct_write_uncompressed_data(hid_t fapl_id) H5Pclose(dxpl); H5Fclose(file); - /* Report the performance */ - reportTime(timeval_start, (double)(NX*NY*NZ*sizeof(unsigned int)/MB)); + /* Report the performance */ + reportTime(timeval_start, (double)(NX*NY*NZ*sizeof(unsigned int)/MB)); PASSED(); return 0; @@ -332,7 +332,7 @@ error: /*-------------------------------------------------- - * Benchmark the performance of the new function + * Benchmark the performance of the new function * with precompressed data. *-------------------------------------------------- */ @@ -348,8 +348,8 @@ test_direct_write_compressed_data(hid_t fapl_id) unsigned filter_mask = 0; hsize_t offset[RANK] = {0, 0, 0}; - struct timeval timeval_start; - + struct timeval timeval_start; + TESTING("H5DOwrite_chunk for pre-compressed data"); if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0) @@ -366,8 +366,8 @@ test_direct_write_compressed_data(hid_t fapl_id) TEST_ERROR; - /* Write the compressed chunk data repeatedly to cover all the chunks in the - * dataset, using the direct writing function. */ + /* Write the compressed chunk data repeatedly to cover all the chunks in the + * dataset, using the direct writing function. */ for(i=0; i<NX; i++) { status = H5Dwrite_chunk(dataset, dxpl, filter_mask, offset, data_size[i], outbuf[i]); (offset[0])++; @@ -379,9 +379,9 @@ test_direct_write_compressed_data(hid_t fapl_id) H5Dclose(dataset); H5Pclose(dxpl); H5Fclose(file); - - /* Report the performance */ - reportTime(timeval_start, (double)(total_size/MB)); + + /* Report the performance */ + reportTime(timeval_start, (double)(total_size/MB)); PASSED(); return 0; @@ -416,7 +416,7 @@ test_compressed_write(hid_t fapl_id) hsize_t count[RANK]; /* Block count */ hsize_t block[RANK]; /* Block sizes */ - struct timeval timeval_start; + struct timeval timeval_start; TESTING("H5Dwrite with compression enabled"); @@ -443,14 +443,14 @@ test_compressed_write(hid_t fapl_id) stride[0] = stride[1] = stride[2] = 1; count[0] = count[1] = count[2] = 1; block[0] = CHUNK_NX; block[1] = CHUNK_NY; block[2] = CHUNK_NZ; - + for(i=0; i<NX; i++) { /* * Select hyperslab for one chunk in the file */ if((status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, stride, count, block)) < 0) TEST_ERROR; - (start[0])++; + (start[0])++; if((status = H5Dwrite(dataset, H5T_NATIVE_INT, mem_space, dataspace, H5P_DEFAULT, direct_buf[i])) < 0) @@ -465,10 +465,10 @@ test_compressed_write(hid_t fapl_id) H5Sclose(mem_space); H5Pclose(dxpl); H5Fclose(file); - - /* Report the performance */ - reportTime(timeval_start, (double)(NX*NY*NZ*sizeof(unsigned int)/MB)); - + + /* Report the performance */ + reportTime(timeval_start, (double)(NX*NY*NZ*sizeof(unsigned int)/MB)); + PASSED(); return 0; @@ -504,7 +504,7 @@ test_no_compress_write(hid_t fapl_id) hsize_t count[RANK]; /* Block count */ hsize_t block[RANK]; /* Block sizes */ - struct timeval timeval_start; + struct timeval timeval_start; TESTING("H5Dwrite without compression"); @@ -531,14 +531,14 @@ test_no_compress_write(hid_t fapl_id) stride[0] = stride[1] = stride[2] = 1; count[0] = count[1] = count[2] = 1; block[0] = CHUNK_NX; block[1] = CHUNK_NY; block[2] = CHUNK_NZ; - + for(i=0; i<NX; i++) { /* * Select hyperslab for one chunk in the file */ if((status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, stride, count, block)) < 0) TEST_ERROR; - 
(start[0])++; + (start[0])++; if((status = H5Dwrite(dataset, H5T_NATIVE_INT, mem_space, dataspace, H5P_DEFAULT, direct_buf[i])) < 0) @@ -553,10 +553,10 @@ test_no_compress_write(hid_t fapl_id) H5Sclose(mem_space); H5Pclose(dxpl); H5Fclose(file); - - /* Report the performance */ - reportTime(timeval_start, (double)(NX*NY*NZ*sizeof(unsigned int)/MB)); - + + /* Report the performance */ + reportTime(timeval_start, (double)(NX*NY*NZ*sizeof(unsigned int)/MB)); + PASSED(); return 0; @@ -576,13 +576,13 @@ error: * data to a Unix file *-------------------------------------------------- */ -int +int test_unix_write(void) { int file, flag; - ssize_t op_size; + ssize_t op_size; int i; - struct timeval timeval_start; + struct timeval timeval_start; TESTING("Write compressed data to a Unix file"); @@ -595,8 +595,8 @@ test_unix_write(void) if ((file=open(FILENAME[1],flag))== -1) TEST_ERROR; - /* Write the compressed chunk data repeatedly to cover all the chunks in the - * dataset, using the direct writing function. */ + /* Write the compressed chunk data repeatedly to cover all the chunks in the + * dataset, using the direct writing function. */ for(i=0; i<NX; i++) { op_size = write(file, outbuf[i],data_size[i]); if (op_size < 0) @@ -617,14 +617,14 @@ test_unix_write(void) TEST_ERROR; } - /* Report the performance */ - reportTime(timeval_start, (double)(total_size/MB)); + /* Report the performance */ + reportTime(timeval_start, (double)(total_size/MB)); PASSED(); return 0; error: - return 1; + return 1; } /*-------------------------------------------------- @@ -650,7 +650,7 @@ main (void) free(outbuf[i]); free(direct_buf[i]); } - + return 0; } diff --git a/tools/test/perform/overhead.c b/tools/test/perform/overhead.c index 108d9e4..58558a5 100644 --- a/tools/test/perform/overhead.c +++ b/tools/test/perform/overhead.c @@ -217,9 +217,9 @@ test(fill_t fill_style, const double splits[], fspace, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) goto error; if ((fd=HDopen(FILE_NAME_1, O_RDONLY, 0666)) < 0) goto error; - if(FILL_RANDOM==fill_style) + if(FILL_RANDOM==fill_style) had = (int *)calloc((size_t)cur_size[0], sizeof(int)); - + for (i=1; i<=cur_size[0]; i++) { /* Decide which chunk to write to */ @@ -391,7 +391,7 @@ main(int argc, char *argv[]) nerrors += test(FILL_INWARD, splits, FALSE, use_cache); nerrors += test(FILL_OUTWARD, splits, FALSE, use_cache); nerrors += test(FILL_RANDOM, splits, FALSE, use_cache); - } + } else { if (use_cache) usage(argv[0]); nerrors += test(fill_style, splits, TRUE, FALSE); diff --git a/tools/test/perform/pio_perf.c b/tools/test/perform/pio_perf.c index 826e7a9..8146d84 100644 --- a/tools/test/perform/pio_perf.c +++ b/tools/test/perform/pio_perf.c @@ -80,7 +80,7 @@ #define PIO_MPI 0x2 #define PIO_HDF5 0x4 -#ifdef STANDALONE +#ifdef STANDALONE #define DBL_EPSILON 2.2204460492503131e-16 #define H5_DBL_ABS_EQUAL(X,Y) (fabs((X)-(Y)) < DBL_EPSILON) #endif diff --git a/tools/test/perform/sio_engine.c b/tools/test/perform/sio_engine.c index 9ea94cb..aa3a316 100644 --- a/tools/test/perform/sio_engine.c +++ b/tools/test/perform/sio_engine.c @@ -1321,7 +1321,7 @@ do_cleanupfile(iotype iot, char *filename) } H5Pclose(fapl); break; - + default: /* unknown request */ HDfprintf(stderr, "Unknown IO type request (%d)\n", (int)iot); |