Diffstat (limited to 'tools/src/h5stat/h5stat.c')
-rw-r--r-- | tools/src/h5stat/h5stat.c | 347
1 file changed, 172 insertions, 175 deletions
diff --git a/tools/src/h5stat/h5stat.c b/tools/src/h5stat/h5stat.c
index 021529e..9da6ba2 100644
--- a/tools/src/h5stat/h5stat.c
+++ b/tools/src/h5stat/h5stat.c
@@ -208,45 +208,44 @@ static void
 usage(const char *prog)
 {
     HDfflush(stdout);
-    HDfprintf(stdout, "usage: %s [OPTIONS] file\n", prog);
-    HDfprintf(stdout, "\n");
-    HDfprintf(stdout, " ERROR\n");
-    HDfprintf(stdout, " --enable-error-stack Prints messages from the HDF5 error stack as they occur\n");
-    HDfprintf(stdout, " Optional value 2 also prints file open errors\n");
-    HDfprintf(stdout, " OPTIONS\n");
-    HDfprintf(stdout, " -h, --help Print a usage message and exit\n");
-    HDfprintf(stdout, " -V, --version Print version number and exit\n");
-    HDfprintf(stdout, " -f, --file Print file information\n");
-    HDfprintf(stdout, " -F, --filemetadata Print file space information for file's metadata\n");
-    HDfprintf(stdout, " -g, --group Print group information\n");
-    HDfprintf(stdout, " -l N, --links=N Set the threshold for the # of links when printing\n");
-    HDfprintf(stdout, " information for small groups. N is an integer greater\n");
-    HDfprintf(stdout, " than 0. The default threshold is 10.\n");
-    HDfprintf(stdout, " -G, --groupmetadata Print file space information for groups' metadata\n");
-    HDfprintf(stdout, " -d, --dset Print dataset information\n");
-    HDfprintf(stdout, " -m N, --dims=N Set the threshold for the dimension sizes when printing\n");
-    HDfprintf(stdout,
-              " information for small datasets. N is an integer greater\n");
-    HDfprintf(stdout, " than 0. The default threshold is 10.\n");
-    HDfprintf(stdout, " -D, --dsetmetadata Print file space information for datasets' metadata\n");
-    HDfprintf(stdout, " -T, --dtypemetadata Print datasets' datatype information\n");
-    HDfprintf(stdout, " -A, --attribute Print attribute information\n");
-    HDfprintf(stdout, " -a N, --numattrs=N Set the threshold for the # of attributes when printing\n");
-    HDfprintf(stdout,
-              " information for small # of attributes. N is an integer greater\n");
-    HDfprintf(stdout, " than 0. The default threshold is 10.\n");
-    HDfprintf(stdout, " -s, --freespace Print free space information\n");
-    HDfprintf(stdout, " -S, --summary Print summary of file space information\n");
-    HDfprintf(stdout, " --s3-cred=<cred> Access file on S3, using provided credential\n");
-    HDfprintf(stdout, " <cred> :: (region,id,key)\n");
-    HDfprintf(stdout, " If <cred> == \"(,,)\", no authentication is used.\n");
-    HDfprintf(stdout, " --hdfs-attrs=<attrs> Access a file on HDFS with given configuration\n");
-    HDfprintf(stdout, " attributes.\n");
-    HDfprintf(stdout, " <attrs> :: (<namenode name>,<namenode port>,\n");
-    HDfprintf(stdout, " <kerberos cache path>,<username>,\n");
-    HDfprintf(stdout, " <buffer size>)\n");
-    HDfprintf(stdout, " If an attribute is empty, a default value will be\n");
-    HDfprintf(stdout, " used.\n");
+    fprintf(stdout, "usage: %s [OPTIONS] file\n", prog);
+    fprintf(stdout, "\n");
+    fprintf(stdout, " ERROR\n");
+    fprintf(stdout, " --enable-error-stack Prints messages from the HDF5 error stack as they occur\n");
+    fprintf(stdout, " Optional value 2 also prints file open errors\n");
+    fprintf(stdout, " OPTIONS\n");
+    fprintf(stdout, " -h, --help Print a usage message and exit\n");
+    fprintf(stdout, " -V, --version Print version number and exit\n");
+    fprintf(stdout, " -f, --file Print file information\n");
+    fprintf(stdout, " -F, --filemetadata Print file space information for file's metadata\n");
+    fprintf(stdout, " -g, --group Print group information\n");
+    fprintf(stdout, " -l N, --links=N Set the threshold for the # of links when printing\n");
+    fprintf(stdout, " information for small groups. N is an integer greater\n");
+    fprintf(stdout, " than 0. The default threshold is 10.\n");
+    fprintf(stdout, " -G, --groupmetadata Print file space information for groups' metadata\n");
+    fprintf(stdout, " -d, --dset Print dataset information\n");
+    fprintf(stdout, " -m N, --dims=N Set the threshold for the dimension sizes when printing\n");
+    fprintf(stdout, " information for small datasets. N is an integer greater\n");
+    fprintf(stdout, " than 0. The default threshold is 10.\n");
+    fprintf(stdout, " -D, --dsetmetadata Print file space information for datasets' metadata\n");
+    fprintf(stdout, " -T, --dtypemetadata Print datasets' datatype information\n");
+    fprintf(stdout, " -A, --attribute Print attribute information\n");
+    fprintf(stdout, " -a N, --numattrs=N Set the threshold for the # of attributes when printing\n");
+    fprintf(stdout,
+            " information for small # of attributes. N is an integer greater\n");
+    fprintf(stdout, " than 0. The default threshold is 10.\n");
+    fprintf(stdout, " -s, --freespace Print free space information\n");
+    fprintf(stdout, " -S, --summary Print summary of file space information\n");
+    fprintf(stdout, " --s3-cred=<cred> Access file on S3, using provided credential\n");
+    fprintf(stdout, " <cred> :: (region,id,key)\n");
+    fprintf(stdout, " If <cred> == \"(,,)\", no authentication is used.\n");
+    fprintf(stdout, " --hdfs-attrs=<attrs> Access a file on HDFS with given configuration\n");
+    fprintf(stdout, " attributes.\n");
+    fprintf(stdout, " <attrs> :: (<namenode name>,<namenode port>,\n");
+    fprintf(stdout, " <kerberos cache path>,<username>,\n");
+    fprintf(stdout, " <buffer size>)\n");
+    fprintf(stdout, " If an attribute is empty, a default value will be\n");
+    fprintf(stdout, " used.\n");
 }

 /*-------------------------------------------------------------------------
@@ -1096,14 +1095,14 @@ iter_free(iter_t *iter)
 static herr_t
 print_file_info(const iter_t *iter)
 {
-    HDprintf("File information\n");
-    HDprintf("\t# of unique groups: %lu\n", iter->uniq_groups);
-    HDprintf("\t# of unique datasets: %lu\n", iter->uniq_dsets);
-    HDprintf("\t# of unique named datatypes: %lu\n", iter->uniq_dtypes);
-    HDprintf("\t# of unique links: %lu\n", iter->uniq_links);
-    HDprintf("\t# of unique other: %lu\n", iter->uniq_others);
-    HDprintf("\tMax. # of links to object: %lu\n", iter->max_links);
-    HDfprintf(stdout, "\tMax. # of objects in group: %" PRIuHSIZE "\n", iter->max_fanout);
+    printf("File information\n");
+    printf("\t# of unique groups: %lu\n", iter->uniq_groups);
+    printf("\t# of unique datasets: %lu\n", iter->uniq_dsets);
+    printf("\t# of unique named datatypes: %lu\n", iter->uniq_dtypes);
+    printf("\t# of unique links: %lu\n", iter->uniq_links);
+    printf("\t# of unique other: %lu\n", iter->uniq_others);
+    printf("\tMax. # of links to object: %lu\n", iter->max_links);
+    fprintf(stdout, "\tMax. # of objects in group: %" PRIuHSIZE "\n", iter->max_fanout);

     return 0;
 } /* print_file_info() */
@@ -1125,41 +1124,41 @@ print_file_info(const iter_t *iter)
 static herr_t
 print_file_metadata(const iter_t *iter)
 {
-    HDfprintf(stdout, "File space information for file metadata (in bytes):\n");
-    HDfprintf(stdout, "\tSuperblock: %" PRIuHSIZE "\n", iter->super_size);
-    HDfprintf(stdout, "\tSuperblock extension: %" PRIuHSIZE "\n", iter->super_ext_size);
-    HDfprintf(stdout, "\tUser block: %" PRIuHSIZE "\n", iter->ublk_size);
-
-    HDfprintf(stdout, "\tObject headers: (total/unused)\n");
-    HDfprintf(stdout, "\t\tGroups: %" PRIuHSIZE "/%" PRIuHSIZE "\n", iter->group_ohdr_info.total_size,
-              iter->group_ohdr_info.free_size);
-    HDfprintf(stdout, "\t\tDatasets(exclude compact data): %" PRIuHSIZE "/%" PRIuHSIZE "\n",
-              iter->dset_ohdr_info.total_size, iter->dset_ohdr_info.free_size);
-    HDfprintf(stdout, "\t\tDatatypes: %" PRIuHSIZE "/%" PRIuHSIZE "\n", iter->dtype_ohdr_info.total_size,
-              iter->dtype_ohdr_info.free_size);
-
-    HDfprintf(stdout, "\tGroups:\n");
-    HDfprintf(stdout, "\t\tB-tree/List: %" PRIuHSIZE "\n", iter->groups_btree_storage_size);
-    HDfprintf(stdout, "\t\tHeap: %" PRIuHSIZE "\n", iter->groups_heap_storage_size);
-
-    HDfprintf(stdout, "\tAttributes:\n");
-    HDfprintf(stdout, "\t\tB-tree/List: %" PRIuHSIZE "\n", iter->attrs_btree_storage_size);
-    HDfprintf(stdout, "\t\tHeap: %" PRIuHSIZE "\n", iter->attrs_heap_storage_size);
-
-    HDfprintf(stdout, "\tChunked datasets:\n");
-    HDfprintf(stdout, "\t\tIndex: %" PRIuHSIZE "\n", iter->datasets_index_storage_size);
-
-    HDfprintf(stdout, "\tDatasets:\n");
-    HDfprintf(stdout, "\t\tHeap: %" PRIuHSIZE "\n", iter->datasets_heap_storage_size);
-
-    HDfprintf(stdout, "\tShared Messages:\n");
-    HDfprintf(stdout, "\t\tHeader: %" PRIuHSIZE "\n", iter->SM_hdr_storage_size);
-    HDfprintf(stdout, "\t\tB-tree/List: %" PRIuHSIZE "\n", iter->SM_index_storage_size);
-    HDfprintf(stdout, "\t\tHeap: %" PRIuHSIZE "\n", iter->SM_heap_storage_size);
-
-    HDfprintf(stdout, "\tFree-space managers:\n");
-    HDfprintf(stdout, "\t\tHeader: %" PRIuHSIZE "\n", iter->free_hdr);
-    HDfprintf(stdout, "\t\tAmount of free space: %" PRIuHSIZE "\n", iter->free_space);
+    fprintf(stdout, "File space information for file metadata (in bytes):\n");
+    fprintf(stdout, "\tSuperblock: %" PRIuHSIZE "\n", iter->super_size);
+    fprintf(stdout, "\tSuperblock extension: %" PRIuHSIZE "\n", iter->super_ext_size);
+    fprintf(stdout, "\tUser block: %" PRIuHSIZE "\n", iter->ublk_size);
+
+    fprintf(stdout, "\tObject headers: (total/unused)\n");
+    fprintf(stdout, "\t\tGroups: %" PRIuHSIZE "/%" PRIuHSIZE "\n", iter->group_ohdr_info.total_size,
+            iter->group_ohdr_info.free_size);
+    fprintf(stdout, "\t\tDatasets(exclude compact data): %" PRIuHSIZE "/%" PRIuHSIZE "\n",
+            iter->dset_ohdr_info.total_size, iter->dset_ohdr_info.free_size);
+    fprintf(stdout, "\t\tDatatypes: %" PRIuHSIZE "/%" PRIuHSIZE "\n", iter->dtype_ohdr_info.total_size,
+            iter->dtype_ohdr_info.free_size);
+
+    fprintf(stdout, "\tGroups:\n");
+    fprintf(stdout, "\t\tB-tree/List: %" PRIuHSIZE "\n", iter->groups_btree_storage_size);
+    fprintf(stdout, "\t\tHeap: %" PRIuHSIZE "\n", iter->groups_heap_storage_size);
+
+    fprintf(stdout, "\tAttributes:\n");
+    fprintf(stdout, "\t\tB-tree/List: %" PRIuHSIZE "\n", iter->attrs_btree_storage_size);
+    fprintf(stdout, "\t\tHeap: %" PRIuHSIZE "\n", iter->attrs_heap_storage_size);
+
+    fprintf(stdout, "\tChunked datasets:\n");
+    fprintf(stdout, "\t\tIndex: %" PRIuHSIZE "\n", iter->datasets_index_storage_size);
+
+    fprintf(stdout, "\tDatasets:\n");
+    fprintf(stdout, "\t\tHeap: %" PRIuHSIZE "\n", iter->datasets_heap_storage_size);
+
+    fprintf(stdout, "\tShared Messages:\n");
+    fprintf(stdout, "\t\tHeader: %" PRIuHSIZE "\n", iter->SM_hdr_storage_size);
+    fprintf(stdout, "\t\tB-tree/List: %" PRIuHSIZE "\n", iter->SM_index_storage_size);
+    fprintf(stdout, "\t\tHeap: %" PRIuHSIZE "\n", iter->SM_heap_storage_size);
+
+    fprintf(stdout, "\tFree-space managers:\n");
+    fprintf(stdout, "\t\tHeader: %" PRIuHSIZE "\n", iter->free_hdr);
+    fprintf(stdout, "\t\tAmount of free space: %" PRIuHSIZE "\n", iter->free_space);

     return 0;
 } /* print_file_metadata() */
@@ -1185,32 +1184,31 @@ print_group_info(const iter_t *iter)
     unsigned long total; /* Total count for various statistics */
     unsigned      u;     /* Local index variable */

-    HDprintf("Small groups (with 0 to %u links):\n", sgroups_threshold - 1);
+    printf("Small groups (with 0 to %u links):\n", sgroups_threshold - 1);
     total = 0;
     for (u = 0; u < (unsigned)sgroups_threshold; u++) {
         if (iter->num_small_groups[u] > 0) {
-            HDprintf("\t# of groups with %u link(s): %lu\n", u, iter->num_small_groups[u]);
+            printf("\t# of groups with %u link(s): %lu\n", u, iter->num_small_groups[u]);
             total += iter->num_small_groups[u];
         } /* end if */
     } /* end for */
-    HDprintf("\tTotal # of small groups: %lu\n", total);
+    printf("\tTotal # of small groups: %lu\n", total);

-    HDprintf("Group bins:\n");
+    printf("Group bins:\n");
     total = 0;
     if ((iter->group_nbins > 0) && (iter->group_bins[0] > 0)) {
-        HDprintf("\t# of groups with 0 link: %lu\n", iter->group_bins[0]);
+        printf("\t# of groups with 0 link: %lu\n", iter->group_bins[0]);
         total = iter->group_bins[0];
     } /* end if */
     power = 1;
     for (u = 1; u < iter->group_nbins; u++) {
         if (iter->group_bins[u] > 0) {
-            HDprintf("\t# of groups with %lu - %lu links: %lu\n", power, (power * 10) - 1,
-                     iter->group_bins[u]);
+            printf("\t# of groups with %lu - %lu links: %lu\n", power, (power * 10) - 1, iter->group_bins[u]);
             total += iter->group_bins[u];
         } /* end if */
         power *= 10;
     } /* end for */
-    HDprintf("\tTotal # of groups: %lu\n", total);
+    printf("\tTotal # of groups: %lu\n", total);

     return 0;
 } /* print_group_info() */
@@ -1230,13 +1228,13 @@ print_group_info(const iter_t *iter)
 static herr_t
 print_group_metadata(const iter_t *iter)
 {
-    HDprintf("File space information for groups' metadata (in bytes):\n");
+    printf("File space information for groups' metadata (in bytes):\n");

-    HDfprintf(stdout, "\tObject headers (total/unused): %" PRIuHSIZE "/%" PRIuHSIZE "\n",
-              iter->group_ohdr_info.total_size, iter->group_ohdr_info.free_size);
+    fprintf(stdout, "\tObject headers (total/unused): %" PRIuHSIZE "/%" PRIuHSIZE "\n",
+            iter->group_ohdr_info.total_size, iter->group_ohdr_info.free_size);

-    HDfprintf(stdout, "\tB-tree/List: %" PRIuHSIZE "\n", iter->groups_btree_storage_size);
-    HDfprintf(stdout, "\tHeap: %" PRIuHSIZE "\n", iter->groups_heap_storage_size);
+    fprintf(stdout, "\tB-tree/List: %" PRIuHSIZE "\n", iter->groups_btree_storage_size);
+    fprintf(stdout, "\tHeap: %" PRIuHSIZE "\n", iter->groups_heap_storage_size);

     return 0;
 } /* print_group_metadata() */
@@ -1262,69 +1260,68 @@ print_group_metadata(const iter_t *iter)
     unsigned      u;     /* Local index variable */

     if (iter->uniq_dsets > 0) {
-        HDprintf("Dataset dimension information:\n");
-        HDprintf("\tMax. rank of datasets: %u\n", iter->max_dset_rank);
-        HDprintf("\tDataset ranks:\n");
+        printf("Dataset dimension information:\n");
+        printf("\tMax. rank of datasets: %u\n", iter->max_dset_rank);
+        printf("\tDataset ranks:\n");
         for (u = 0; u < H5S_MAX_RANK; u++)
             if (iter->dset_rank_count[u] > 0)
-                HDprintf("\t\t# of dataset with rank %u: %lu\n", u, iter->dset_rank_count[u]);
+                printf("\t\t# of dataset with rank %u: %lu\n", u, iter->dset_rank_count[u]);

-        HDprintf("1-D Dataset information:\n");
-        HDfprintf(stdout, "\tMax. dimension size of 1-D datasets: %" PRIuHSIZE "\n", iter->max_dset_dims);
-        HDprintf("\tSmall 1-D datasets (with dimension sizes 0 to %u):\n", sdsets_threshold - 1);
+        printf("1-D Dataset information:\n");
+        fprintf(stdout, "\tMax. dimension size of 1-D datasets: %" PRIuHSIZE "\n", iter->max_dset_dims);
+        printf("\tSmall 1-D datasets (with dimension sizes 0 to %u):\n", sdsets_threshold - 1);
         total = 0;
         for (u = 0; u < (unsigned)sdsets_threshold; u++) {
             if (iter->small_dset_dims[u] > 0) {
-                HDprintf("\t\t# of datasets with dimension sizes %u: %lu\n", u, iter->small_dset_dims[u]);
+                printf("\t\t# of datasets with dimension sizes %u: %lu\n", u, iter->small_dset_dims[u]);
                 total += iter->small_dset_dims[u];
             } /* end if */
         } /* end for */
-        HDprintf("\t\tTotal # of small datasets: %lu\n", total);
+        printf("\t\tTotal # of small datasets: %lu\n", total);

         /* Protect against no datasets in file */
         if (iter->dset_dim_nbins > 0) {
-            HDprintf("\t1-D Dataset dimension bins:\n");
+            printf("\t1-D Dataset dimension bins:\n");
             total = 0;
             if (iter->dset_dim_bins[0] > 0) {
-                HDprintf("\t\t# of datasets with dimension size 0: %lu\n", iter->dset_dim_bins[0]);
+                printf("\t\t# of datasets with dimension size 0: %lu\n", iter->dset_dim_bins[0]);
                 total = iter->dset_dim_bins[0];
             } /* end if */
             power = 1;
             for (u = 1; u < iter->dset_dim_nbins; u++) {
                 if (iter->dset_dim_bins[u] > 0) {
-                    HDprintf("\t\t# of datasets with dimension size %lu - %lu: %lu\n", power,
-                             (power * 10) - 1, iter->dset_dim_bins[u]);
+                    printf("\t\t# of datasets with dimension size %lu - %lu: %lu\n", power, (power * 10) - 1,
+                           iter->dset_dim_bins[u]);
                     total += iter->dset_dim_bins[u];
                 } /* end if */
                 power *= 10;
             } /* end for */
-            HDprintf("\t\tTotal # of datasets: %lu\n", total);
+            printf("\t\tTotal # of datasets: %lu\n", total);
         } /* end if */

-        HDprintf("Dataset storage information:\n");
-        HDfprintf(stdout, "\tTotal raw data size: %" PRIuHSIZE "\n", iter->dset_storage_size);
-        HDfprintf(stdout, "\tTotal external raw data size: %" PRIuHSIZE "\n",
-                  iter->dset_external_storage_size);
+        printf("Dataset storage information:\n");
+        fprintf(stdout, "\tTotal raw data size: %" PRIuHSIZE "\n", iter->dset_storage_size);
+        fprintf(stdout, "\tTotal external raw data size: %" PRIuHSIZE "\n", iter->dset_external_storage_size);

-        HDprintf("Dataset layout information:\n");
+        printf("Dataset layout information:\n");
         for (u = 0; u < H5D_NLAYOUTS; u++)
-            HDprintf("\tDataset layout counts[%s]: %lu\n",
-                     (u == H5D_COMPACT
-                          ? "COMPACT"
-                          : (u == H5D_CONTIGUOUS ? "CONTIG" : (u == H5D_CHUNKED ? "CHUNKED" : "VIRTUAL"))),
-                     iter->dset_layouts[u]);
-        HDprintf("\tNumber of external files : %lu\n", iter->nexternal);
-
-        HDprintf("Dataset filters information:\n");
-        HDprintf("\tNumber of datasets with:\n");
-        HDprintf("\t\tNO filter: %lu\n", iter->dset_comptype[H5Z_FILTER_ERROR + 1]);
-        HDprintf("\t\tGZIP filter: %lu\n", iter->dset_comptype[H5Z_FILTER_DEFLATE]);
-        HDprintf("\t\tSHUFFLE filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SHUFFLE]);
-        HDprintf("\t\tFLETCHER32 filter: %lu\n", iter->dset_comptype[H5Z_FILTER_FLETCHER32]);
-        HDprintf("\t\tSZIP filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SZIP]);
-        HDprintf("\t\tNBIT filter: %lu\n", iter->dset_comptype[H5Z_FILTER_NBIT]);
-        HDprintf("\t\tSCALEOFFSET filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SCALEOFFSET]);
-        HDprintf("\t\tUSER-DEFINED filter: %lu\n", iter->dset_comptype[H5_NFILTERS_IMPL - 1]);
+            printf("\tDataset layout counts[%s]: %lu\n",
+                   (u == H5D_COMPACT
+                        ? "COMPACT"
+                        : (u == H5D_CONTIGUOUS ? "CONTIG" : (u == H5D_CHUNKED ? "CHUNKED" : "VIRTUAL"))),
+                   iter->dset_layouts[u]);
+        printf("\tNumber of external files : %lu\n", iter->nexternal);
+
+        printf("Dataset filters information:\n");
+        printf("\tNumber of datasets with:\n");
+        printf("\t\tNO filter: %lu\n", iter->dset_comptype[H5Z_FILTER_ERROR + 1]);
+        printf("\t\tGZIP filter: %lu\n", iter->dset_comptype[H5Z_FILTER_DEFLATE]);
+        printf("\t\tSHUFFLE filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SHUFFLE]);
+        printf("\t\tFLETCHER32 filter: %lu\n", iter->dset_comptype[H5Z_FILTER_FLETCHER32]);
+        printf("\t\tSZIP filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SZIP]);
+        printf("\t\tNBIT filter: %lu\n", iter->dset_comptype[H5Z_FILTER_NBIT]);
+        printf("\t\tSCALEOFFSET filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SCALEOFFSET]);
+        printf("\t\tUSER-DEFINED filter: %lu\n", iter->dset_comptype[H5_NFILTERS_IMPL - 1]);
     } /* end if */

     return 0;
@@ -1346,13 +1343,13 @@ print_dataset_info(const iter_t *iter)
 static herr_t
 print_dset_metadata(const iter_t *iter)
 {
-    HDprintf("File space information for datasets' metadata (in bytes):\n");
+    printf("File space information for datasets' metadata (in bytes):\n");

-    HDfprintf(stdout, "\tObject headers (total/unused): %" PRIuHSIZE "/%" PRIuHSIZE "\n",
-              iter->dset_ohdr_info.total_size, iter->dset_ohdr_info.free_size);
+    fprintf(stdout, "\tObject headers (total/unused): %" PRIuHSIZE "/%" PRIuHSIZE "\n",
+            iter->dset_ohdr_info.total_size, iter->dset_ohdr_info.free_size);

-    HDfprintf(stdout, "\tIndex for Chunked datasets: %" PRIuHSIZE "\n", iter->datasets_index_storage_size);
-    HDfprintf(stdout, "\tHeap: %" PRIuHSIZE "\n", iter->datasets_heap_storage_size);
+    fprintf(stdout, "\tIndex for Chunked datasets: %" PRIuHSIZE "\n", iter->datasets_index_storage_size);
+    fprintf(stdout, "\tHeap: %" PRIuHSIZE "\n", iter->datasets_heap_storage_size);

     return 0;
 } /* print_dset_metadata() */
@@ -1378,20 +1375,20 @@ print_dset_dtype_meta(const iter_t *iter)
     unsigned u;     /* Local index variable */

     if (iter->dset_ntypes) {
-        HDprintf("Dataset datatype information:\n");
-        HDprintf("\t# of unique datatypes used by datasets: %lu\n", iter->dset_ntypes);
+        printf("Dataset datatype information:\n");
+        printf("\t# of unique datatypes used by datasets: %lu\n", iter->dset_ntypes);
         total = 0;
         for (u = 0; u < iter->dset_ntypes; u++) {
             H5Tencode(iter->dset_type_info[u].tid, NULL, &dtype_size);
-            HDprintf("\tDataset datatype #%u:\n", u);
-            HDprintf("\t\tCount (total/named) = (%lu/%lu)\n", iter->dset_type_info[u].count,
-                     iter->dset_type_info[u].named);
-            HDprintf("\t\tSize (desc./elmt) = (%lu/%lu)\n", (unsigned long)dtype_size,
-                     (unsigned long)H5Tget_size(iter->dset_type_info[u].tid));
+            printf("\tDataset datatype #%u:\n", u);
+            printf("\t\tCount (total/named) = (%lu/%lu)\n", iter->dset_type_info[u].count,
+                   iter->dset_type_info[u].named);
+            printf("\t\tSize (desc./elmt) = (%lu/%lu)\n", (unsigned long)dtype_size,
+                   (unsigned long)H5Tget_size(iter->dset_type_info[u].tid));
             H5Tclose(iter->dset_type_info[u].tid);
             total += iter->dset_type_info[u].count;
         } /* end for */
-        HDprintf("\tTotal dataset datatype count: %lu\n", total);
+        printf("\tTotal dataset datatype count: %lu\n", total);
     } /* end if */

     return 0;
@@ -1418,29 +1415,29 @@ print_dset_dtype_meta(const iter_t *iter)
     unsigned long total; /* Total count for various statistics */
     unsigned      u;     /* Local index variable */

-    HDprintf("Small # of attributes (objects with 1 to %u attributes):\n", sattrs_threshold);
+    printf("Small # of attributes (objects with 1 to %u attributes):\n", sattrs_threshold);
     total = 0;
     for (u = 1; u <= (unsigned)sattrs_threshold; u++) {
         if (iter->num_small_attrs[u] > 0) {
-            HDprintf("\t# of objects with %u attributes: %lu\n", u, iter->num_small_attrs[u]);
+            printf("\t# of objects with %u attributes: %lu\n", u, iter->num_small_attrs[u]);
             total += iter->num_small_attrs[u];
         } /* end if */
     } /* end for */
-    HDprintf("\tTotal # of objects with small # of attributes: %lu\n", total);
+    printf("\tTotal # of objects with small # of attributes: %lu\n", total);

-    HDprintf("Attribute bins:\n");
+    printf("Attribute bins:\n");
     total = 0;
     power = 1;
     for (u = 1; u < iter->attr_nbins; u++) {
         if (iter->attr_bins[u] > 0) {
-            HDprintf("\t# of objects with %lu - %lu attributes: %lu\n", power, (power * 10) - 1,
-                     iter->attr_bins[u]);
+            printf("\t# of objects with %lu - %lu attributes: %lu\n", power, (power * 10) - 1,
+                   iter->attr_bins[u]);
             total += iter->attr_bins[u];
         } /* end if */
         power *= 10;
     } /* end for */
-    HDprintf("\tTotal # of objects with attributes: %lu\n", total);
-    HDprintf("\tMax. # of attributes to objects: %lu\n", (unsigned long)iter->max_attrs);
+    printf("\tTotal # of objects with attributes: %lu\n", total);
+    printf("\tMax. # of attributes to objects: %lu\n", (unsigned long)iter->max_attrs);

     return 0;
 } /* print_attr_info() */
@@ -1465,30 +1462,30 @@ print_attr_info(const iter_t *iter)
     unsigned long total; /* Total count for various statistics */
     unsigned      u;     /* Local index variable */

-    HDfprintf(stdout, "Free-space persist: %s\n", iter->fs_persist ? "TRUE" : "FALSE");
-    HDfprintf(stdout, "Free-space section threshold: %" PRIuHSIZE " bytes\n", iter->fs_threshold);
-    HDprintf("Small size free-space sections (< %u bytes):\n", (unsigned)SIZE_SMALL_SECTS);
+    fprintf(stdout, "Free-space persist: %s\n", iter->fs_persist ? "TRUE" : "FALSE");
+    fprintf(stdout, "Free-space section threshold: %" PRIuHSIZE " bytes\n", iter->fs_threshold);
+    printf("Small size free-space sections (< %u bytes):\n", (unsigned)SIZE_SMALL_SECTS);
     total = 0;
     for (u = 0; u < SIZE_SMALL_SECTS; u++) {
         if (iter->num_small_sects[u] > 0) {
-            HDprintf("\t# of sections of size %u: %lu\n", u, iter->num_small_sects[u]);
+            printf("\t# of sections of size %u: %lu\n", u, iter->num_small_sects[u]);
             total += iter->num_small_sects[u];
         } /* end if */
     } /* end for */
-    HDprintf("\tTotal # of small size sections: %lu\n", total);
+    printf("\tTotal # of small size sections: %lu\n", total);

-    HDprintf("Free-space section bins:\n");
+    printf("Free-space section bins:\n");
     total = 0;
     power = 1;
     for (u = 1; u < iter->sect_nbins; u++) {
         if (iter->sect_bins[u] > 0) {
-            HDprintf("\t# of sections of size %lu - %lu: %lu\n", power, (power * 10) - 1, iter->sect_bins[u]);
+            printf("\t# of sections of size %lu - %lu: %lu\n", power, (power * 10) - 1, iter->sect_bins[u]);
             total += iter->sect_bins[u];
         } /* end if */
         power *= 10;
     } /* end for */
-    HDprintf("\tTotal # of sections: %lu\n", total);
+    printf("\tTotal # of sections: %lu\n", total);

     return 0;
 } /* print_freespace_info() */
@@ -1513,9 +1510,9 @@ print_freespace_info(const iter_t *iter)
     hsize_t unaccount = 0;
     double  percent   = 0.0;

-    HDfprintf(stdout, "File space management strategy: %s\n", FS_STRATEGY_NAME[iter->fs_strategy]);
-    HDfprintf(stdout, "File space page size: %" PRIuHSIZE " bytes\n", iter->fsp_size);
-    HDprintf("Summary of file space information:\n");
+    fprintf(stdout, "File space management strategy: %s\n", FS_STRATEGY_NAME[iter->fs_strategy]);
+    fprintf(stdout, "File space page size: %" PRIuHSIZE " bytes\n", iter->fsp_size);
+    printf("Summary of file space information:\n");
     total_meta =
         iter->super_size + iter->super_ext_size + iter->ublk_size + iter->group_ohdr_info.total_size +
         iter->dset_ohdr_info.total_size + iter->dtype_ohdr_info.total_size + iter->groups_btree_storage_size +
@@ -1523,28 +1520,28 @@ print_storage_summary(const iter_t *iter)
         iter->datasets_index_storage_size + iter->datasets_heap_storage_size + iter->SM_hdr_storage_size +
         iter->SM_index_storage_size + iter->SM_heap_storage_size + iter->free_hdr;

-    HDfprintf(stdout, " File metadata: %" PRIuHSIZE " bytes\n", total_meta);
-    HDfprintf(stdout, " Raw data: %" PRIuHSIZE " bytes\n", iter->dset_storage_size);
+    fprintf(stdout, " File metadata: %" PRIuHSIZE " bytes\n", total_meta);
+    fprintf(stdout, " Raw data: %" PRIuHSIZE " bytes\n", iter->dset_storage_size);

     percent = ((double)iter->free_space / (double)iter->filesize) * 100.0;
-    HDfprintf(stdout, " Amount/Percent of tracked free space: %" PRIuHSIZE " bytes/%3.1f%%\n",
-              iter->free_space, percent);
+    fprintf(stdout, " Amount/Percent of tracked free space: %" PRIuHSIZE " bytes/%3.1f%%\n",
+            iter->free_space, percent);

     if (iter->filesize < (total_meta + iter->dset_storage_size + iter->free_space)) {
         unaccount = (total_meta + iter->dset_storage_size + iter->free_space) - iter->filesize;
-        HDfprintf(stdout, " ??? File has %" PRIuHSIZE " more bytes accounted for than its size! ???\n",
-                  unaccount);
+        fprintf(stdout, " ??? File has %" PRIuHSIZE " more bytes accounted for than its size! ???\n",
+                unaccount);
     }
     else {
         unaccount = iter->filesize - (total_meta + iter->dset_storage_size + iter->free_space);
-        HDfprintf(stdout, " Unaccounted space: %" PRIuHSIZE " bytes\n", unaccount);
+        fprintf(stdout, " Unaccounted space: %" PRIuHSIZE " bytes\n", unaccount);
     }

-    HDfprintf(stdout, "Total space: %" PRIuHSIZE " bytes\n",
-              total_meta + iter->dset_storage_size + iter->free_space + unaccount);
+    fprintf(stdout, "Total space: %" PRIuHSIZE " bytes\n",
+            total_meta + iter->dset_storage_size + iter->free_space + unaccount);

     if (iter->nexternal)
-        HDfprintf(stdout, "External raw data: %" PRIuHSIZE " bytes\n", iter->dset_external_storage_size);
+        fprintf(stdout, "External raw data: %" PRIuHSIZE " bytes\n", iter->dset_external_storage_size);

     return 0;
 } /* print_storage_summary() */
@@ -1622,7 +1619,7 @@ print_file_statistics(const iter_t *iter)
 static void
 print_object_statistics(const char *name)
 {
-    HDprintf("Object name %s\n", name);
+    printf("Object name %s\n", name);
 } /* print_object_statistics() */

 /*-------------------------------------------------------------------------
@@ -1705,7 +1702,7 @@ main(int argc, char *argv[])
         hid_t       fcpl;
         H5F_info2_t finfo;

-        HDprintf("Filename: %s\n", fname);
+        printf("Filename: %s\n", fname);

         fid = h5tools_fopen(fname, H5F_ACC_RDONLY, fapl_id, (fapl_id != H5P_DEFAULT), NULL, 0);
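For reference, the HDprintf/HDfprintf/HDfflush names dropped by this patch are, in recent HDF5 sources, thin compatibility macros over the standard C stdio calls, which is why the substitution above is purely mechanical. A minimal sketch of the assumed mapping is shown below; the real definitions live in H5private.h and may differ between releases:

    /* Assumed shape of the HD stdio wrappers this patch removes; not the
     * verbatim HDF5 definitions -- consult H5private.h in the matching release. */
    #include <stdio.h>
    #define HDprintf  printf   /* print to stdout */
    #define HDfprintf fprintf  /* print to an explicit stream */
    #define HDfflush  fflush   /* flush a stream */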