path: root/tools/lib/h5tools_utils.c
author     Quincey Koziol <koziol@hdfgroup.org>    2010-08-19 19:55:48 (GMT)
committer  Quincey Koziol <koziol@hdfgroup.org>    2010-08-19 19:55:48 (GMT)
commit     2f6e3cb5bee1abac0be573123439938cb5d3096e (patch)
tree       fac2c21a94dde6fc226321a0df5a775d31f04269 /tools/lib/h5tools_utils.c
parent     e56b6f6c4019ad7dddf2325c91b134646fcb55e6 (diff)
[svn-r19252] Description:
Bring Coverity changes from branch to trunk:

r19161: Fixed the part for matching the subset info with the dataset.

r19189: BZ1646: h5dump does not check the number of dimensions of the subsetting parameters against the dataset. Changed the subset_t structure from holding hsize_t pointers to holding new subset_d pointers, which pair the original hsize_t pointer with a len. This len is then checked against the dataset's ndims in the handle_dataset function of h5dump. Changed all references to use the new data structure. Added tests for each subset parameter.

r19190: Added new h5dump ddl files.

Tested on:
Mac OS X/32 10.6.4 (amazon) w/debug & production
(h5committested on branch)
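The subset rework described above can be pictured with a short sketch. This is a minimal illustration only: the field names and the check_subset_dims() helper below are assumptions for clarity, not the exact definitions merged from the branch.

/* Sketch of the structure change: each subsetting parameter now carries
 * its own length, so h5dump can compare it against the dataset rank.
 * Names below are illustrative, not the exact library definitions. */
#include "hdf5.h"

typedef struct subset_d {
    hsize_t      *data;  /* original hsize_t pointer (values from the command line) */
    unsigned int  len;   /* number of values supplied for this parameter */
} subset_d;

typedef struct subset_t {
    subset_d start;
    subset_d stride;
    subset_d count;
    subset_d block;
} subset_t;

/* Spirit of the ndims check added to handle_dataset(): reject a subsetting
 * parameter that lists more values than the dataset has dimensions. */
static int
check_subset_dims(hid_t dset, const subset_t *s)
{
    hid_t space = H5Dget_space(dset);
    int   ndims = H5Sget_simple_extent_ndims(space);

    H5Sclose(space);
    if (ndims < 0)
        return -1;
    if (s->start.data && s->start.len > (unsigned)ndims)
        return -1;
    return 0;
}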
Diffstat (limited to 'tools/lib/h5tools_utils.c')
-rw-r--r--  tools/lib/h5tools_utils.c  |  80
1 file changed, 76 insertions, 4 deletions
diff --git a/tools/lib/h5tools_utils.c b/tools/lib/h5tools_utils.c
index b226bd7..2e3c756 100644
--- a/tools/lib/h5tools_utils.c
+++ b/tools/lib/h5tools_utils.c
@@ -42,6 +42,12 @@ const char *opt_arg; /*flag argument (or value) */
static int h5tools_d_status = 0;
static const char *h5tools_progname = "h5tools";
+/* ``parallel_print'' variables */
+unsigned char g_Parallel = 0; /*0 for serial, 1 for parallel */
+char outBuff[OUTBUFF_SIZE];
+int outBuffOffset;
+FILE* overflow_file = NULL;
+
/* local functions */
static void init_table(table_t **tbl);
#ifdef H5DUMP_DEBUG
@@ -49,6 +55,72 @@ static void dump_table(char* tablename, table_t *table);
#endif /* H5DUMP_DEBUG */
static void add_obj(table_t *table, haddr_t objno, const char *objname, hbool_t recorded);
+/*-------------------------------------------------------------------------
+ * Function: parallel_print
+ *
+ * Purpose: wrapper for printf for use in parallel mode.
+ *
+ * Programmer: Leon Arber
+ *
+ * Date: December 1, 2004
+ *
+ *-------------------------------------------------------------------------
+ */
+void parallel_print(const char* format, ...)
+{
+ int bytes_written;
+ va_list ap;
+
+ va_start(ap, format);
+
+ if(!g_Parallel)
+ vprintf(format, ap);
+ else
+ {
+
+ if(overflow_file == NULL) /*no overflow has occurred yet */
+ {
+#if 0
+ printf("calling HDvsnprintf: OUTBUFF_SIZE=%ld, outBuffOffset=%ld, ", (long)OUTBUFF_SIZE, (long)outBuffOffset);
+#endif
+ bytes_written = HDvsnprintf(outBuff+outBuffOffset, OUTBUFF_SIZE-outBuffOffset, format, ap);
+#if 0
+ printf("bytes_written=%ld\n", (long)bytes_written);
+#endif
+ va_end(ap);
+ va_start(ap, format);
+
+#if 0
+ printf("Result: bytes_written=%ld, OUTBUFF_SIZE-outBuffOffset=%ld\n", (long)bytes_written, (long)OUTBUFF_SIZE-outBuffOffset);
+#endif
+
+ if ((bytes_written < 0) ||
+#ifdef H5_VSNPRINTF_WORKS
+ (bytes_written >= (OUTBUFF_SIZE-outBuffOffset))
+#else
+ ((bytes_written+1) == (OUTBUFF_SIZE-outBuffOffset))
+#endif
+ )
+ {
+ /* Terminate the outbuff at the end of the previous output */
+ outBuff[outBuffOffset] = '\0';
+
+ overflow_file = HDtmpfile();
+ if(overflow_file == NULL)
+ fprintf(stderr, "warning: could not create overflow file. Output may be truncated.\n");
+ else
+ bytes_written = HDvfprintf(overflow_file, format, ap);
+ }
+ else
+ outBuffOffset += bytes_written;
+ }
+ else
+ bytes_written = HDvfprintf(overflow_file, format, ap);
+
+ }
+ va_end(ap);
+}
+
/*-------------------------------------------------------------------------
* Function: error_msg
@@ -591,7 +663,7 @@ add_obj(table_t *table, haddr_t objno, const char *objname, hbool_t record)
/* See if we need to make table larger */
if(table->nobjs == table->size) {
table->size *= 2;
- table->objs = HDrealloc(table->objs, table->size * sizeof(table->objs[0]));
+ table->objs = (struct obj_t *)HDrealloc(table->objs, table->size * sizeof(table->objs[0]));
} /* end if */
/* Increment number of objects in table */
@@ -690,7 +762,7 @@ H5tools_get_link_info(hid_t file_id, const char * linkpath, h5tool_link_info_t *
HDassert(link_info->trg_path);
/* get link value */
- if(H5Lget_val(file_id, linkpath, link_info->trg_path, link_info->linfo.u.val_size, H5P_DEFAULT) < 0) {
+ if(H5Lget_val(file_id, linkpath, (void *)link_info->trg_path, link_info->linfo.u.val_size, H5P_DEFAULT) < 0) {
if(link_info->opt.msg_mode == 1)
parallel_print("Warning: unable to get link value from <%s>\n",linkpath);
goto out;
@@ -769,12 +841,12 @@ void h5tools_setstatus(int D_status)
h5tools_d_status = D_status;
}
-const char*h5tools_getprogname()
+const char*h5tools_getprogname(void)
{
return h5tools_progname;
}
-int h5tools_getstatus()
+int h5tools_getstatus(void)
{
return h5tools_d_status;
}
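For context on the parallel_print() wrapper added by this diff: when g_Parallel is zero it behaves like printf(); when a tool sets g_Parallel to 1, output accumulates in outBuff and spills into a temporary file once OUTBUFF_SIZE bytes have been written. A minimal caller sketch follows, assuming the usual h5tools_utils.h declarations; the helper below is hypothetical and not part of this commit.

/* Hypothetical caller, mirroring how the tools route warnings through
 * parallel_print() instead of printf(). */
#include "h5tools_utils.h"   /* g_Parallel, parallel_print(), OUTBUFF_SIZE */

static void
report_link_warning(const char *linkpath)
{
    /* Serial mode (g_Parallel == 0): printed immediately via vprintf().
     * Parallel mode (g_Parallel == 1): appended to outBuff, overflowing
     * into a temporary file created with HDtmpfile(). */
    parallel_print("Warning: unable to get link value from <%s>\n", linkpath);
}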