author     Allen Byrne <byrn@hdfgroup.org>  2019-08-18 20:36:57 (GMT)
committer  Allen Byrne <byrn@hdfgroup.org>  2019-08-18 20:36:57 (GMT)
commit     16093cc00241824a3f595392aaa5aeab53d93b59 (patch)
tree       421e39191c5e6d1b4ca1fbbe97e37c597b985335 /tools/src/h5stat
parent     3413544b00a5a0c349e8f29dc98562bef287ef9c (diff)
Merge HD prefix and whitespace changes from develop
Diffstat (limited to 'tools/src/h5stat')
-rw-r--r--   tools/src/h5stat/h5stat.c   165
1 file changed, 84 insertions, 81 deletions
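
For context, the HD prefix adopted throughout this commit is HDF5's convention of routing C standard library calls through macros in the library's private headers (H5private.h), so any platform-specific substitution can be made in one place. The following is a minimal illustrative sketch of that convention, not the actual H5private.h definitions; in particular, the real HDfprintf is a custom implementation rather than a plain forward, since it supports extensions such as the %Hu format for hsize_t seen in the diff below.

/* Illustrative sketch of the HD-prefix wrapper convention (assumed
 * definitions, not copied from H5private.h). On most platforms the
 * wrappers simply forward to the corresponding C library call. */
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>

#define HDprintf(...)      printf(__VA_ARGS__)
#define HDrealloc(P, Z)    realloc(P, Z)
#define HDfree(P)          free(P)
#define HDassert(X)        assert(X)

/* Example use, in the style this commit converts h5stat.c to: */
static void example(unsigned long nbins)
{
    unsigned long *bins = (unsigned long *)HDrealloc(NULL, nbins * sizeof(unsigned long));

    HDassert(bins);
    HDprintf("Allocated %lu bins\n", nbins);
    HDfree(bins);
}
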
diff --git a/tools/src/h5stat/h5stat.c b/tools/src/h5stat/h5stat.c
index b3ccc3f..bd10e8b 100644
--- a/tools/src/h5stat/h5stat.c
+++ b/tools/src/h5stat/h5stat.c
@@ -357,12 +357,12 @@ attribute_stats(iter_t *iter, const H5O_info_t *oi)
/* Add attribute count to proper bin */
bin = ceil_log10((unsigned long)oi->num_attrs);
if((bin + 1) > iter->attr_nbins) {
- iter->attr_bins = (unsigned long *)HDrealloc(iter->attr_bins, (bin + 1) * sizeof(unsigned long));
+ iter->attr_bins = (unsigned long *)HDrealloc(iter->attr_bins, (bin + 1) * sizeof(unsigned long));
HDassert(iter->attr_bins);
/* Initialize counts for intermediate bins */
while(iter->attr_nbins < bin)
- iter->attr_bins[iter->attr_nbins++] = 0;
+ iter->attr_bins[iter->attr_nbins++] = 0;
iter->attr_nbins++;
/* Initialize count for new bin */
@@ -509,8 +509,8 @@ dataset_stats(iter_t *iter, const char *name, const H5O_info_t *oi)
HGOTO_ERROR(FAIL, H5E_tools_min_id_g, "attribute_stats() failed");
/* Get storage info */
- if((storage = H5Dget_storage_size(did)) < 0)
- HGOTO_ERROR(FAIL, H5E_tools_min_id_g, "H5Dget_storage_size() failed");
+ /* Failure 0 indistinguishable from no-data-stored 0 */
+ storage = H5Dget_storage_size(did);
/* Gather layout statistics */
if((dcpl = H5Dget_create_plist(did)) < 0)
@@ -535,7 +535,8 @@ dataset_stats(iter_t *iter, const char *name, const H5O_info_t *oi)
if(num_ext) {
iter->nexternal += (unsigned long)num_ext;
iter->dset_external_storage_size += (unsigned long)storage;
- } else
+ }
+ else
iter->dset_storage_size += storage;
/* Gather dataspace statistics */
@@ -931,7 +932,8 @@ parse_command_line(int argc, const char *argv[], struct handler_t **hand_ret)
error_msg("Invalid threshold for small groups\n");
goto error;
}
- } else
+ }
+ else
error_msg("Missing threshold for small groups\n");
break;
@@ -953,7 +955,8 @@ parse_command_line(int argc, const char *argv[], struct handler_t **hand_ret)
error_msg("Invalid threshold for small datasets\n");
goto error;
}
- } else
+ }
+ else
error_msg("Missing threshold for small datasets\n");
break;
@@ -975,7 +978,8 @@ parse_command_line(int argc, const char *argv[], struct handler_t **hand_ret)
error_msg("Invalid threshold for small # of attributes\n");
goto error;
}
- } else
+ }
+ else
error_msg("Missing threshold for small # of attributes\n");
break;
@@ -1126,13 +1130,13 @@ iter_free(iter_t *iter)
static herr_t
print_file_info(const iter_t *iter)
{
- printf("File information\n");
- printf("\t# of unique groups: %lu\n", iter->uniq_groups);
- printf("\t# of unique datasets: %lu\n", iter->uniq_dsets);
- printf("\t# of unique named datatypes: %lu\n", iter->uniq_dtypes);
- printf("\t# of unique links: %lu\n", iter->uniq_links);
- printf("\t# of unique other: %lu\n", iter->uniq_others);
- printf("\tMax. # of links to object: %lu\n", iter->max_links);
+ HDprintf("File information\n");
+ HDprintf("\t# of unique groups: %lu\n", iter->uniq_groups);
+ HDprintf("\t# of unique datasets: %lu\n", iter->uniq_dsets);
+ HDprintf("\t# of unique named datatypes: %lu\n", iter->uniq_dtypes);
+ HDprintf("\t# of unique links: %lu\n", iter->uniq_links);
+ HDprintf("\t# of unique other: %lu\n", iter->uniq_others);
+ HDprintf("\tMax. # of links to object: %lu\n", iter->max_links);
HDfprintf(stdout, "\tMax. # of objects in group: %Hu\n", iter->max_fanout);
return 0;
@@ -1162,15 +1166,12 @@ print_file_metadata(const iter_t *iter)
HDfprintf(stdout, "\tUser block: %Hu\n", iter->ublk_size);
HDfprintf(stdout, "\tObject headers: (total/unused)\n");
- HDfprintf(stdout, "\t\tGroups: %Hu/%Hu\n",
- iter->group_ohdr_info.total_size,
- iter->group_ohdr_info.free_size);
+ HDfprintf(stdout, "\t\tGroups: %Hu/%Hu\n", iter->group_ohdr_info.total_size,
+ iter->group_ohdr_info.free_size);
HDfprintf(stdout, "\t\tDatasets(exclude compact data): %Hu/%Hu\n",
- iter->dset_ohdr_info.total_size,
- iter->dset_ohdr_info.free_size);
- HDfprintf(stdout, "\t\tDatatypes: %Hu/%Hu\n",
- iter->dtype_ohdr_info.total_size,
- iter->dtype_ohdr_info.free_size);
+ iter->dset_ohdr_info.total_size, iter->dset_ohdr_info.free_size);
+ HDfprintf(stdout, "\t\tDatatypes: %Hu/%Hu\n", iter->dtype_ohdr_info.total_size,
+ iter->dtype_ohdr_info.free_size);
HDfprintf(stdout, "\tGroups:\n");
HDfprintf(stdout, "\t\tB-tree/List: %Hu\n", iter->groups_btree_storage_size);
@@ -1229,28 +1230,28 @@ print_group_info(const iter_t *iter)
total = 0;
for(u = 0; u < (unsigned)sgroups_threshold; u++) {
if(iter->num_small_groups[u] > 0) {
- printf("\t# of groups with %u link(s): %lu\n", u, iter->num_small_groups[u]);
+ HDprintf("\t# of groups with %u link(s): %lu\n", u, iter->num_small_groups[u]);
total += iter->num_small_groups[u];
} /* end if */
} /* end for */
- printf("\tTotal # of small groups: %lu\n", total);
+ HDprintf("\tTotal # of small groups: %lu\n", total);
- printf("Group bins:\n");
+ HDprintf("Group bins:\n");
total = 0;
if((iter->group_nbins > 0) && (iter->group_bins[0] > 0)) {
- printf("\t# of groups with 0 link: %lu\n", iter->group_bins[0]);
+ HDprintf("\t# of groups with 0 link: %lu\n", iter->group_bins[0]);
total = iter->group_bins[0];
} /* end if */
power = 1;
for(u = 1; u < iter->group_nbins; u++) {
if(iter->group_bins[u] > 0) {
- printf("\t# of groups with %lu - %lu links: %lu\n", power, (power * 10) - 1,
+ HDprintf("\t# of groups with %lu - %lu links: %lu\n", power, (power * 10) - 1,
iter->group_bins[u]);
total += iter->group_bins[u];
} /* end if */
power *= 10;
} /* end for */
- printf("\tTotal # of groups: %lu\n", total);
+ HDprintf("\tTotal # of groups: %lu\n", total);
return 0;
} /* print_group_info() */
@@ -1271,7 +1272,7 @@ print_group_info(const iter_t *iter)
static herr_t
print_group_metadata(const iter_t *iter)
{
- printf("File space information for groups' metadata (in bytes):\n");
+ HDprintf("File space information for groups' metadata (in bytes):\n");
HDfprintf(stdout, "\tObject headers (total/unused): %Hu/%Hu\n",
iter->group_ohdr_info.total_size, iter->group_ohdr_info.free_size);
@@ -1304,66 +1305,66 @@ print_dataset_info(const iter_t *iter)
unsigned u; /* Local index variable */
if(iter->uniq_dsets > 0) {
- printf("Dataset dimension information:\n");
- printf("\tMax. rank of datasets: %u\n", iter->max_dset_rank);
- printf("\tDataset ranks:\n");
+ HDprintf("Dataset dimension information:\n");
+ HDprintf("\tMax. rank of datasets: %u\n", iter->max_dset_rank);
+ HDprintf("\tDataset ranks:\n");
for(u = 0; u < H5S_MAX_RANK; u++)
if(iter->dset_rank_count[u] > 0)
- printf("\t\t# of dataset with rank %u: %lu\n", u, iter->dset_rank_count[u]);
+ HDprintf("\t\t# of dataset with rank %u: %lu\n", u, iter->dset_rank_count[u]);
- printf("1-D Dataset information:\n");
+ HDprintf("1-D Dataset information:\n");
HDfprintf(stdout, "\tMax. dimension size of 1-D datasets: %Hu\n", iter->max_dset_dims);
- printf("\tSmall 1-D datasets (with dimension sizes 0 to %u):\n", sdsets_threshold - 1);
+ HDprintf("\tSmall 1-D datasets (with dimension sizes 0 to %u):\n", sdsets_threshold - 1);
total = 0;
for(u = 0; u < (unsigned)sdsets_threshold; u++) {
if(iter->small_dset_dims[u] > 0) {
- printf("\t\t# of datasets with dimension sizes %u: %lu\n", u,
+ HDprintf("\t\t# of datasets with dimension sizes %u: %lu\n", u,
iter->small_dset_dims[u]);
total += iter->small_dset_dims[u];
} /* end if */
} /* end for */
- printf("\t\tTotal # of small datasets: %lu\n", total);
+ HDprintf("\t\tTotal # of small datasets: %lu\n", total);
/* Protect against no datasets in file */
if(iter->dset_dim_nbins > 0) {
- printf("\t1-D Dataset dimension bins:\n");
+ HDprintf("\t1-D Dataset dimension bins:\n");
total = 0;
if(iter->dset_dim_bins[0] > 0) {
- printf("\t\t# of datasets with dimension size 0: %lu\n", iter->dset_dim_bins[0]);
+ HDprintf("\t\t# of datasets with dimension size 0: %lu\n", iter->dset_dim_bins[0]);
total = iter->dset_dim_bins[0];
} /* end if */
power = 1;
for(u = 1; u < iter->dset_dim_nbins; u++) {
if(iter->dset_dim_bins[u] > 0) {
- printf("\t\t# of datasets with dimension size %lu - %lu: %lu\n", power, (power * 10) - 1,
+ HDprintf("\t\t# of datasets with dimension size %lu - %lu: %lu\n", power, (power * 10) - 1,
iter->dset_dim_bins[u]);
total += iter->dset_dim_bins[u];
} /* end if */
power *= 10;
} /* end for */
- printf("\t\tTotal # of datasets: %lu\n", total);
+ HDprintf("\t\tTotal # of datasets: %lu\n", total);
} /* end if */
- printf("Dataset storage information:\n");
+ HDprintf("Dataset storage information:\n");
HDfprintf(stdout, "\tTotal raw data size: %Hu\n", iter->dset_storage_size);
HDfprintf(stdout, "\tTotal external raw data size: %Hu\n", iter->dset_external_storage_size);
- printf("Dataset layout information:\n");
+ HDprintf("Dataset layout information:\n");
for(u = 0; u < H5D_NLAYOUTS; u++)
- printf("\tDataset layout counts[%s]: %lu\n", (u == H5D_COMPACT ? "COMPACT" :
+ HDprintf("\tDataset layout counts[%s]: %lu\n", (u == H5D_COMPACT ? "COMPACT" :
(u == H5D_CONTIGUOUS ? "CONTIG" : (u == H5D_CHUNKED ? "CHUNKED" : "VIRTUAL"))), iter->dset_layouts[u]);
- printf("\tNumber of external files : %lu\n", iter->nexternal);
-
- printf("Dataset filters information:\n");
- printf("\tNumber of datasets with:\n");
- printf("\t\tNO filter: %lu\n", iter->dset_comptype[H5Z_FILTER_ERROR+1]);
- printf("\t\tGZIP filter: %lu\n", iter->dset_comptype[H5Z_FILTER_DEFLATE]);
- printf("\t\tSHUFFLE filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SHUFFLE]);
- printf("\t\tFLETCHER32 filter: %lu\n", iter->dset_comptype[H5Z_FILTER_FLETCHER32]);
- printf("\t\tSZIP filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SZIP]);
- printf("\t\tNBIT filter: %lu\n", iter->dset_comptype[H5Z_FILTER_NBIT]);
- printf("\t\tSCALEOFFSET filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SCALEOFFSET]);
- printf("\t\tUSER-DEFINED filter: %lu\n", iter->dset_comptype[H5_NFILTERS_IMPL-1]);
+ HDprintf("\tNumber of external files : %lu\n", iter->nexternal);
+
+ HDprintf("Dataset filters information:\n");
+ HDprintf("\tNumber of datasets with:\n");
+ HDprintf("\t\tNO filter: %lu\n", iter->dset_comptype[H5Z_FILTER_ERROR+1]);
+ HDprintf("\t\tGZIP filter: %lu\n", iter->dset_comptype[H5Z_FILTER_DEFLATE]);
+ HDprintf("\t\tSHUFFLE filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SHUFFLE]);
+ HDprintf("\t\tFLETCHER32 filter: %lu\n", iter->dset_comptype[H5Z_FILTER_FLETCHER32]);
+ HDprintf("\t\tSZIP filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SZIP]);
+ HDprintf("\t\tNBIT filter: %lu\n", iter->dset_comptype[H5Z_FILTER_NBIT]);
+ HDprintf("\t\tSCALEOFFSET filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SCALEOFFSET]);
+ HDprintf("\t\tUSER-DEFINED filter: %lu\n", iter->dset_comptype[H5_NFILTERS_IMPL-1]);
} /* end if */
return 0;
@@ -1386,7 +1387,7 @@ print_dataset_info(const iter_t *iter)
static herr_t
print_dset_metadata(const iter_t *iter)
{
- printf("File space information for datasets' metadata (in bytes):\n");
+ HDprintf("File space information for datasets' metadata (in bytes):\n");
HDfprintf(stdout, "\tObject headers (total/unused): %Hu/%Hu\n",
iter->dset_ohdr_info.total_size, iter->dset_ohdr_info.free_size);
@@ -1420,20 +1421,20 @@ print_dset_dtype_meta(const iter_t *iter)
unsigned u; /* Local index variable */
if(iter->dset_ntypes) {
- printf("Dataset datatype information:\n");
- printf("\t# of unique datatypes used by datasets: %lu\n", iter->dset_ntypes);
+ HDprintf("Dataset datatype information:\n");
+ HDprintf("\t# of unique datatypes used by datasets: %lu\n", iter->dset_ntypes);
total = 0;
for(u = 0; u < iter->dset_ntypes; u++) {
H5Tencode(iter->dset_type_info[u].tid, NULL, &dtype_size);
- printf("\tDataset datatype #%u:\n", u);
- printf("\t\tCount (total/named) = (%lu/%lu)\n",
+ HDprintf("\tDataset datatype #%u:\n", u);
+ HDprintf("\t\tCount (total/named) = (%lu/%lu)\n",
iter->dset_type_info[u].count, iter->dset_type_info[u].named);
- printf("\t\tSize (desc./elmt) = (%lu/%lu)\n", (unsigned long)dtype_size,
+ HDprintf("\t\tSize (desc./elmt) = (%lu/%lu)\n", (unsigned long)dtype_size,
(unsigned long)H5Tget_size(iter->dset_type_info[u].tid));
H5Tclose(iter->dset_type_info[u].tid);
total += iter->dset_type_info[u].count;
} /* end for */
- printf("\tTotal dataset datatype count: %lu\n", total);
+ HDprintf("\tTotal dataset datatype count: %lu\n", total);
} /* end if */
return 0;
@@ -1461,29 +1462,29 @@ print_attr_info(const iter_t *iter)
unsigned long total; /* Total count for various statistics */
unsigned u; /* Local index variable */
- printf("Small # of attributes (objects with 1 to %u attributes):\n", sattrs_threshold);
+ HDprintf("Small # of attributes (objects with 1 to %u attributes):\n", sattrs_threshold);
total = 0;
for(u = 1; u <= (unsigned)sattrs_threshold; u++) {
if(iter->num_small_attrs[u] > 0) {
- printf("\t# of objects with %u attributes: %lu\n", u, iter->num_small_attrs[u]);
+ HDprintf("\t# of objects with %u attributes: %lu\n", u, iter->num_small_attrs[u]);
total += iter->num_small_attrs[u];
} /* end if */
} /* end for */
- printf("\tTotal # of objects with small # of attributes: %lu\n", total);
+ HDprintf("\tTotal # of objects with small # of attributes: %lu\n", total);
- printf("Attribute bins:\n");
+ HDprintf("Attribute bins:\n");
total = 0;
power = 1;
for(u = 1; u < iter->attr_nbins; u++) {
if(iter->attr_bins[u] > 0) {
- printf("\t# of objects with %lu - %lu attributes: %lu\n", power, (power * 10) - 1,
+ HDprintf("\t# of objects with %lu - %lu attributes: %lu\n", power, (power * 10) - 1,
iter->attr_bins[u]);
total += iter->attr_bins[u];
} /* end if */
power *= 10;
} /* end for */
- printf("\tTotal # of objects with attributes: %lu\n", total);
- printf("\tMax. # of attributes to objects: %lu\n", (unsigned long)iter->max_attrs);
+ HDprintf("\tTotal # of objects with attributes: %lu\n", total);
+ HDprintf("\tMax. # of attributes to objects: %lu\n", (unsigned long)iter->max_attrs);
return 0;
} /* print_attr_info() */
@@ -1515,25 +1516,25 @@ print_freespace_info(const iter_t *iter)
total = 0;
for(u = 0; u < SIZE_SMALL_SECTS; u++) {
if(iter->num_small_sects[u] > 0) {
- printf("\t# of sections of size %u: %lu\n", u, iter->num_small_sects[u]);
+ HDprintf("\t# of sections of size %u: %lu\n", u, iter->num_small_sects[u]);
total += iter->num_small_sects[u];
} /* end if */
} /* end for */
printf("\tTotal # of small size sections: %lu\n", total);
- printf("Free-space section bins:\n");
+ HDprintf("Free-space section bins:\n");
total = 0;
power = 1;
for(u = 1; u < iter->sect_nbins; u++) {
if(iter->sect_bins[u] > 0) {
- printf("\t# of sections of size %lu - %lu: %lu\n", power, (power * 10) - 1,
+ HDprintf("\t# of sections of size %lu - %lu: %lu\n", power, (power * 10) - 1,
iter->sect_bins[u]);
total += iter->sect_bins[u];
} /* end if */
power *= 10;
} /* end for */
- printf("\tTotal # of sections: %lu\n", total);
+ HDprintf("\tTotal # of sections: %lu\n", total);
return 0;
} /* print_freespace_info() */
@@ -1561,7 +1562,7 @@ print_storage_summary(const iter_t *iter)
HDfprintf(stdout, "File space management strategy: %s\n", FS_STRATEGY_NAME[iter->fs_strategy]);
HDfprintf(stdout, "File space page size: %Hu bytes\n", iter->fsp_size);
- printf("Summary of file space information:\n");
+ HDprintf("Summary of file space information:\n");
total_meta =
iter->super_size + iter->super_ext_size + iter->ublk_size +
iter->group_ohdr_info.total_size +
@@ -1669,7 +1670,7 @@ print_file_statistics(const iter_t *iter)
static void
print_object_statistics(const char *name)
{
- printf("Object name %s\n", name);
+ HDprintf("Object name %s\n", name);
} /* print_object_statistics() */
@@ -1751,7 +1752,7 @@ main(int argc, const char *argv[])
hid_t fcpl;
H5F_info2_t finfo;
- printf("Filename: %s\n", fname);
+ HDprintf("Filename: %s\n", fname);
fid = H5Fopen(fname, H5F_ACC_RDONLY, H5P_DEFAULT);
if(fid < 0) {
@@ -1786,7 +1787,7 @@ main(int argc, const char *argv[])
if(iter.num_small_groups == NULL || iter.num_small_attrs == NULL || iter.small_dset_dims == NULL) {
error_msg("Unable to allocate memory for tracking small groups/datasets/attributes\n");
- h5tools_setstatus(EXIT_FAILURE);
+ h5tools_setstatus(EXIT_FAILURE);
goto done;
}
@@ -1815,7 +1816,8 @@ main(int argc, const char *argv[])
if(h5trav_visit(fid, hand->obj[u], TRUE, TRUE, obj_stats, lnk_stats, &iter, H5O_INFO_ALL) < 0) {
error_msg("unable to traverse object \"%s\"\n", hand->obj[u]);
h5tools_setstatus(EXIT_FAILURE);
- } else
+ }
+ else
print_statistics(hand->obj[u], &iter);
} /* end for */
} /* end if */
@@ -1823,7 +1825,8 @@ main(int argc, const char *argv[])
if(h5trav_visit(fid, "/", TRUE, TRUE, obj_stats, lnk_stats, &iter, H5O_INFO_ALL) < 0) {
error_msg("unable to traverse objects/links in file \"%s\"\n", fname);
h5tools_setstatus(EXIT_FAILURE);
- } else
+ }
+ else
print_statistics("/", &iter);
} /* end else */
} /* end if */