author    | Allen Byrne <byrn@hdfgroup.org> | 2018-09-26 21:04:34 (GMT)
committer | Allen Byrne <byrn@hdfgroup.org> | 2018-09-26 21:04:34 (GMT)
commit    | 8f103a935be942a4240da9e57a0f67504588948a (patch)
tree      | 130b0379ef8b355fd4555225f182727773aed084 /tools/src
parent    | 873249b58bcf62d73f830d7d9ee056cd554f370d (diff)
HDFFV-10594, 10332, 10531
Diffstat (limited to 'tools/src')
-rw-r--r-- | tools/src/h5dump/h5dump.c                     |   2
-rw-r--r-- | tools/src/h5format_convert/h5format_convert.c | 164
-rw-r--r-- | tools/src/h5jam/h5jam.c                       |  30
-rw-r--r-- | tools/src/misc/h5debug.c                      |   2
4 files changed, 102 insertions, 96 deletions
diff --git a/tools/src/h5dump/h5dump.c b/tools/src/h5dump/h5dump.c
index a5f0369..b9e37e8 100644
--- a/tools/src/h5dump/h5dump.c
+++ b/tools/src/h5dump/h5dump.c
@@ -65,7 +65,7 @@ struct handler_t {
  * parameters. The long-named ones can be partially spelled. When
  * adding more, make sure that they don't clash with each other.
  */
-/* The following initialization makes use of C language cancatenating */
+/* The following initialization makes use of C language concatenating */
 /* "xxx" "yyy" into "xxxyyy". */
 static const char *s_opts = "hn*peyBHirVa:c:d:f:g:k:l:t:w:xD:uX:o*b*F:s:S:A*q:z:m:RE*CM:O*N:vG:";
 static struct long_options l_opts[] = {
diff --git a/tools/src/h5format_convert/h5format_convert.c b/tools/src/h5format_convert/h5format_convert.c
index 0fc0289..b9ed9ce 100644
--- a/tools/src/h5format_convert/h5format_convert.c
+++ b/tools/src/h5format_convert/h5format_convert.c
@@ -226,81 +226,84 @@ convert(hid_t fid, const char *dname)
 
     /* Open the dataset */
     if((did = H5Dopen2(fid, dname, H5P_DEFAULT)) < 0) {
-        error_msg("unable to open dataset \"%s\"\n", dname);
-        h5tools_setstatus(EXIT_FAILURE);
-        goto error;
-
-    } else if(verbose_g)
-        HDfprintf(stdout, "Open the dataset\n");
+        error_msg("unable to open dataset \"%s\"\n", dname);
+        h5tools_setstatus(EXIT_FAILURE);
+        goto error;
+    }
+    else if(verbose_g)
+        HDfprintf(stdout, "Open the dataset\n");
 
     /* Get the dataset's creation property list */
     if((dcpl = H5Dget_create_plist(did)) < 0) {
-        error_msg("unable to get the dataset creation property list\n");
-        h5tools_setstatus(EXIT_FAILURE);
-        goto error;
+        error_msg("unable to get the dataset creation property list\n");
+        h5tools_setstatus(EXIT_FAILURE);
+        goto error;
     }
 
     /* Get the dataset's layout */
     if((layout_type = H5Pget_layout(dcpl)) < 0) {
-        error_msg("unable to get the dataset layout type\n");
-        h5tools_setstatus(EXIT_FAILURE);
-        goto error;
-
-    } else if(verbose_g)
-        HDfprintf(stdout, "Retrieve the dataset's layout\n");
+        error_msg("unable to get the dataset layout type\n");
+        h5tools_setstatus(EXIT_FAILURE);
+        goto error;
+    }
+    else if(verbose_g)
+        HDfprintf(stdout, "Retrieve the dataset's layout\n");
 
     switch(layout_type) {
-        case H5D_CHUNKED:
-            if(verbose_g)
-                HDfprintf(stdout, "Dataset is a chunked dataset\n");
-
-            /* Get the dataset's chunk indexing type */
-            if(H5Dget_chunk_index_type(did, &idx_type) < 0) {
-                error_msg("unable to get the chunk indexing type for \"%s\"\n", dname);
-                h5tools_setstatus(EXIT_FAILURE);
-                goto error;
-            } else if(verbose_g)
-                HDfprintf(stdout, "Retrieve the dataset's chunk indexing type\n");
+        case H5D_CHUNKED:
+            if(verbose_g)
+                HDfprintf(stdout, "Dataset is a chunked dataset\n");
+
+            /* Get the dataset's chunk indexing type */
+            if(H5Dget_chunk_index_type(did, &idx_type) < 0) {
+                error_msg("unable to get the chunk indexing type for \"%s\"\n", dname);
+                h5tools_setstatus(EXIT_FAILURE);
+                goto error;
+            }
+            else if(verbose_g)
+                HDfprintf(stdout, "Retrieve the dataset's chunk indexing type\n");
+
+            if(idx_type == H5D_CHUNK_IDX_BTREE) {
+                if(verbose_g)
+                    HDfprintf(stdout, "Dataset's chunk indexing type is already version 1 B-tree: no further action\n");
+                h5tools_setstatus(EXIT_SUCCESS);
+                goto done;
+            }
+            else if (verbose_g)
+                HDfprintf(stdout, "Dataset's chunk indexing type is not version 1 B-tree\n");
 
-            if(idx_type == H5D_CHUNK_IDX_BTREE) {
-                if(verbose_g)
-                    HDfprintf(stdout, "Dataset's chunk indexing type is already version 1 B-tree: no further action\n");
-                h5tools_setstatus(EXIT_SUCCESS);
-                goto done;
-            } else if (verbose_g)
-                HDfprintf(stdout, "Dataset's chunk indexing type is not version 1 B-tree\n");
             break;
 
-        case H5D_CONTIGUOUS:
-            if(verbose_g)
-                HDfprintf(stdout, "Dataset is a contiguous dataset: downgrade layout version as needed\n");
-            break;
+        case H5D_CONTIGUOUS:
+            if(verbose_g)
+                HDfprintf(stdout, "Dataset is a contiguous dataset: downgrade layout version as needed\n");
+            break;
 
-        case H5D_COMPACT:
-            if(verbose_g)
-                HDfprintf(stdout, "Dataset is a compact dataset: downgrade layout version as needed\n");
-            break;
+        case H5D_COMPACT:
+            if(verbose_g)
+                HDfprintf(stdout, "Dataset is a compact dataset: downgrade layout version as needed\n");
+            break;
 
-        case H5D_VIRTUAL:
-            if(verbose_g)
-                HDfprintf(stdout, "No further action for virtual dataset\n");
-            goto done;
+        case H5D_VIRTUAL:
+            if(verbose_g)
+                HDfprintf(stdout, "No further action for virtual dataset\n");
+            goto done;
 
-        case H5D_NLAYOUTS:
-        case H5D_LAYOUT_ERROR:
-        default:
-            error_msg("unknown layout type for \"%s\"\n", dname);
-            h5tools_setstatus(EXIT_FAILURE);
-            goto error;
+        case H5D_NLAYOUTS:
+        case H5D_LAYOUT_ERROR:
+        default:
+            error_msg("unknown layout type for \"%s\"\n", dname);
+            h5tools_setstatus(EXIT_FAILURE);
+            goto error;
 
     } /* end switch */
 
     /* No further action if it is a noop */
     if(noop_g) {
-        if(verbose_g)
-            HDfprintf(stdout, "Not converting the dataset\n");
-        h5tools_setstatus(EXIT_SUCCESS);
-        goto done;
+        if(verbose_g)
+            HDfprintf(stdout, "Not converting the dataset\n");
+        h5tools_setstatus(EXIT_SUCCESS);
+        goto done;
     }
 
     if(verbose_g)
@@ -308,51 +311,54 @@ convert(hid_t fid, const char *dname)
 
     /* Downgrade the dataset */
     if(H5Dformat_convert(did) < 0) {
-        error_msg("unable to downgrade dataset \"%s\"\n", dname);
-        h5tools_setstatus(EXIT_FAILURE);
-        goto error;
-    } else if(verbose_g)
-        HDfprintf(stdout, "Done\n");
+        error_msg("unable to downgrade dataset \"%s\"\n", dname);
+        h5tools_setstatus(EXIT_FAILURE);
+        goto error;
+    }
+    else if(verbose_g)
+        HDfprintf(stdout, "Done\n");
 
 done:
     /* Close the dataset */
     if(H5Dclose(did) < 0) {
         error_msg("unable to close dataset \"%s\"\n", dname);
         h5tools_setstatus(EXIT_FAILURE);
-        goto error;
-    } else if(verbose_g)
-        HDfprintf(stdout, "Close the dataset\n");
+        goto error;
+    }
+    else if(verbose_g)
+        HDfprintf(stdout, "Close the dataset\n");
 
     /* Close the dataset creation property list */
     if(H5Pclose(dcpl) < 0) {
         error_msg("unable to close dataset creation property list\n");
         h5tools_setstatus(EXIT_FAILURE);
-        goto error;
-    } else if(verbose_g)
-        printf("Close the dataset creation property list\n");
+        goto error;
+    }
+    else if(verbose_g)
+        HDprintf("Close the dataset creation property list\n");
 
-    return(0);
+    return 0;
 
 error:
     if(verbose_g)
-        HDfprintf(stdout, "Error encountered\n");
+        HDfprintf(stdout, "Error encountered\n");
 
     H5E_BEGIN_TRY {
         H5Pclose(dcpl);
         H5Dclose(did);
     } H5E_END_TRY;
 
-    return(-1);
+    return -1;
 } /* convert() */
 
 /*-------------------------------------------------------------------------
- * Function: convert_dsets_cb()
+ * Function:    convert_dsets_cb()
  *
- * Purpose: The callback routine from the traversal to convert the
- *          chunk indexing type of the dataset object.
+ * Purpose:     The callback routine from the traversal to convert the
+ *              chunk indexing type of the dataset object.
  *
- * Return: Success: 0
- *         Failure: 1
+ * Return:      Success:    0
+ *              Failure:    -1
 *-------------------------------------------------------------------------
 */
 static int
@@ -363,11 +369,11 @@ convert_dsets_cb(const char *path, const H5O_info_t *oi, const char *already_vis
     /* If the object has already been seen then just return */
     if(NULL == already_visited) {
         if(oi->type == H5O_TYPE_DATASET) {
-            if(verbose_g)
-                HDfprintf(stdout, "Going to process dataset:%s...\n", path);
-            if(convert(fid, path) < 0)
-                goto error;
-        } /* end if */
+            if(verbose_g)
+                HDfprintf(stdout, "Going to process dataset:%s...\n", path);
+            if(convert(fid, path) < 0)
+                goto error;
+        } /* end if */
     } /* end if */
 
     return 0;
diff --git a/tools/src/h5jam/h5jam.c b/tools/src/h5jam/h5jam.c
index 01ba4af..4f3e7a4 100644
--- a/tools/src/h5jam/h5jam.c
+++ b/tools/src/h5jam/h5jam.c
@@ -221,7 +221,7 @@ main (int argc, const char *argv[])
     /* Initialize h5tools lib */
     h5tools_init();
 
-    parse_command_line (argc, argv);
+    parse_command_line(argc, argv);
 
     if (ub_file == NULL) {
         /* no user block */
@@ -252,21 +252,21 @@ main (int argc, const char *argv[])
         leave (EXIT_FAILURE);
     }
 
-    ifile = H5Fopen (input_file, H5F_ACC_RDONLY, H5P_DEFAULT);
+    ifile = H5Fopen(input_file, H5F_ACC_RDONLY, H5P_DEFAULT);
 
     if (ifile < 0) {
        error_msg("Can't open input HDF5 file \"%s\"\n", input_file);
        leave (EXIT_FAILURE);
    }
 
-    plist = H5Fget_create_plist (ifile);
+    plist = H5Fget_create_plist(ifile);
     if (plist < 0) {
         error_msg("Can't get file creation plist for file \"%s\"\n", input_file);
         H5Fclose(ifile);
         leave (EXIT_FAILURE);
     }
 
-    status = H5Pget_userblock (plist, &usize);
+    status = H5Pget_userblock(plist, &usize);
     if (status < 0) {
         error_msg("Can't get user block for file \"%s\"\n", input_file);
         H5Pclose(plist);
@@ -330,7 +330,7 @@ main (int argc, const char *argv[])
         }
     }
 
-    newubsize = compute_user_block_size ((hsize_t) fsize);
+    newubsize = compute_user_block_size((hsize_t) fsize);
 
     startub = usize;
 
@@ -345,22 +345,22 @@ main (int argc, const char *argv[])
         else {
             /* add new ub to current ublock, pad to new offset */
             newubsize += usize;
-            newubsize = compute_user_block_size ((hsize_t) newubsize);
+            newubsize = compute_user_block_size((hsize_t) newubsize);
         }
     }
 
     /* copy the HDF5 from starting at usize to starting at newubsize:
      *  makes room at 'from' for new ub */
     /* if no current ub, usize is 0 */
-    copy_some_to_file (h5fid, ofid, usize, newubsize, (ssize_t) (h5fsize - usize));
+    copy_some_to_file(h5fid, ofid, usize, newubsize, (ssize_t) (h5fsize - usize));
 
     /* copy the old ub to the beginning of the new file */
     if (!do_clobber) {
-        where = copy_some_to_file (h5fid, ofid, (hsize_t) 0, (hsize_t) 0, (ssize_t) usize);
+        where = copy_some_to_file(h5fid, ofid, (hsize_t) 0, (hsize_t) 0, (ssize_t) usize);
     }
 
     /* copy the new ub to the end of the ub */
-    where = copy_some_to_file (ufid, ofid, (hsize_t) 0, startub, (ssize_t) - 1);
+    where = copy_some_to_file(ufid, ofid, (hsize_t) 0, startub, (ssize_t) - 1);
 
     /* pad the ub */
     if(write_pad(ofid, where, &where) < 0) {
@@ -372,18 +372,18 @@ main (int argc, const char *argv[])
     } /* end if */
 
     if(ub_file)
-        HDfree (ub_file);
+        HDfree(ub_file);
     if(input_file)
-        HDfree (input_file);
+        HDfree(input_file);
     if(output_file)
-        HDfree (output_file);
+        HDfree(output_file);
 
     if(ufid >= 0)
-        HDclose (ufid);
+        HDclose(ufid);
     if(h5fid >= 0)
-        HDclose (h5fid);
+        HDclose(h5fid);
     if(ofid >= 0)
-        HDclose (ofid);
+        HDclose(ofid);
 
     return h5tools_getstatus();
 }
diff --git a/tools/src/misc/h5debug.c b/tools/src/misc/h5debug.c
index b66e7ce..088e6e9 100644
--- a/tools/src/misc/h5debug.c
+++ b/tools/src/misc/h5debug.c
@@ -346,7 +346,7 @@ main(int argc, char *argv[])
         /*
          * Debug a global heap collection.
          */
-        status = H5HG_debug (f, addr, stdout, 0, VCOL);
+        status = H5HG_debug(f, addr, stdout, 0, VCOL);
 
     } else if(!HDmemcmp(sig, H5G_NODE_MAGIC, (size_t)H5_SIZEOF_MAGIC)) {
         /*
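
For reference, the convert() routine touched above is built around two public calls that also appear in the diff: H5Dget_chunk_index_type() to inspect a chunked dataset's index, and H5Dformat_convert() to rewrite a non-v1-B-tree index as a version 1 B-tree. The standalone program below is a minimal sketch of that sequence, not part of this commit; the file name "example.h5", the dataset name "/dset", and the bare-bones error handling are illustrative assumptions.

/* Minimal sketch of the downgrade sequence used by convert() above.
 * Assumes a chunked dataset; "example.h5" and "/dset" are hypothetical names. */
#include <stdio.h>
#include <stdlib.h>
#include "hdf5.h"

int
main(void)
{
    hid_t             fid = -1;
    hid_t             did = -1;
    H5D_chunk_index_t idx_type;
    int               ret = EXIT_SUCCESS;

    /* Open the file read-write so the chunk index can be rewritten in place */
    if((fid = H5Fopen("example.h5", H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
        return EXIT_FAILURE;

    /* Open the dataset to be downgraded */
    if((did = H5Dopen2(fid, "/dset", H5P_DEFAULT)) < 0) {
        H5Fclose(fid);
        return EXIT_FAILURE;
    }

    /* Only a non-version-1-B-tree chunk index needs converting;
     * H5Dformat_convert() rewrites it as a version 1 B-tree */
    if(H5Dget_chunk_index_type(did, &idx_type) < 0) {
        fprintf(stderr, "unable to get the chunk indexing type\n");
        ret = EXIT_FAILURE;
    }
    else if(idx_type != H5D_CHUNK_IDX_BTREE && H5Dformat_convert(did) < 0) {
        fprintf(stderr, "unable to downgrade the dataset\n");
        ret = EXIT_FAILURE;
    }

    H5Dclose(did);
    H5Fclose(fid);
    return ret;
}

The h5format_convert tool wraps this same sequence with the layout checks, noop mode, and verbose logging shown in the diff.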