path: root/src/H5D.c
author    Quincey Koziol <koziol@hdfgroup.org>    2005-07-21 14:49:11 (GMT)
committer Quincey Koziol <koziol@hdfgroup.org>    2005-07-21 14:49:11 (GMT)
commit    1e2ddfc8b1847c4b1f67c764d4920c5d1e0c538d (patch)
tree      60e8d8ee4b4fa77ef7a01ebf106d53b9d7675a32 /src/H5D.c
parent    c9b45e24d5f1935d5356cf90f0521d1e82efc4e3 (diff)
[svn-r11094]
Purpose:
    Bug fix
Description:
    Rewrite code for mounting files to clean up layers of kludges and
    implement a much cleaner and more maintainable design.
Platforms tested:
    FreeBSD 4.11 (sleipnir)
    Linux 2.4
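The only part of this commit that touches src/H5D.c is an incidental cleanup of H5D_flush(), shown in the diff below: the dataset count and loop index become unsigned, apparently to match revised H5F_get_obj_count()/H5F_get_obj_ids() return types, and the ID query is now checked against the expected count instead of a "< 0" failure value. For readability, a minimal sketch of the resulting loop follows (assembled from the changed lines in the diff; HDF5-internal declarations, error macros, and the per-dataset flush logic are elided):

    herr_t
    H5D_flush(const H5F_t *f, hid_t dxpl_id, unsigned flags)
    {
        unsigned  num_dsets;              /* Number of datasets in file */
        hid_t    *id_list = NULL;         /* List of dataset IDs */
        H5D_t    *dataset = NULL;         /* Dataset pointer */
        unsigned  u;                      /* Index variable */
        herr_t    ret_value = SUCCEED;    /* Return value */

        /* Count of open datasets is now unsigned, so no "< 0" failure check */
        num_dsets = H5F_get_obj_count(f, H5F_OBJ_DATASET);

        if(num_dsets > 0) {
            H5_CHECK_OVERFLOW(num_dsets, unsigned, size_t);
            if(NULL == (id_list = H5MM_malloc((size_t)num_dsets * sizeof(hid_t))))
                HGOTO_ERROR(H5E_FILE, H5E_CANTINIT, FAIL, "unable to allocate memory for ID list")
            /* The ID query is expected to return exactly num_dsets IDs */
            if(H5F_get_obj_ids(f, H5F_OBJ_DATASET, -1, id_list) != num_dsets)
                HGOTO_ERROR(H5E_FILE, H5E_CANTINIT, FAIL, "unable to get dataset ID list")
            for(u = 0; u < num_dsets; u++) {
                if(NULL == (dataset = H5I_object_verify(id_list[u], H5I_DATASET)))
                    HGOTO_ERROR(H5E_FILE, H5E_CANTINIT, FAIL, "unable to get dataset object")
                /* ... flush each dataset's dirty raw-data buffer and layout message ... */
            }
        }
        /* ... cleanup of id_list and error handling elided ... */
    }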
Diffstat (limited to 'src/H5D.c')
-rw-r--r--   src/H5D.c   17
1 file changed, 7 insertions(+), 10 deletions(-)
diff --git a/src/H5D.c b/src/H5D.c
index 2a063b8..cd63c53 100644
--- a/src/H5D.c
+++ b/src/H5D.c
@@ -3989,8 +3989,6 @@ done:
*
* Date: August 14, 2002
*
- * Comments: Just flushing the compact data information currently.
- *
* Modifications:
*
*-------------------------------------------------------------------------
@@ -3998,11 +3996,11 @@ done:
herr_t
H5D_flush(const H5F_t *f, hid_t dxpl_id, unsigned flags)
{
- int num_dsets; /* Number of datasets in file */
+ unsigned num_dsets; /* Number of datasets in file */
hid_t *id_list=NULL; /* list of dataset IDs */
H5D_t *dataset=NULL; /* Dataset pointer */
+ unsigned u; /* Index variable */
herr_t ret_value = SUCCEED; /* Return value */
- int j; /* Index variable */
FUNC_ENTER_NOAPI(H5D_flush, FAIL)
@@ -4010,18 +4008,17 @@ H5D_flush(const H5F_t *f, hid_t dxpl_id, unsigned flags)
assert(f);
/* Update layout message for compact dataset */
- if((num_dsets=H5F_get_obj_count(f, H5F_OBJ_DATASET))<0)
- HGOTO_ERROR(H5E_FILE, H5E_CANTINIT, FAIL, "unable to get dataset count")
+ num_dsets=H5F_get_obj_count(f, H5F_OBJ_DATASET);
/* Check for something to do */
if(num_dsets>0) {
- H5_CHECK_OVERFLOW(num_dsets,int,size_t);
+ H5_CHECK_OVERFLOW(num_dsets,unsigned,size_t);
if(NULL==(id_list=H5MM_malloc((size_t)num_dsets*sizeof(hid_t))))
HGOTO_ERROR(H5E_FILE, H5E_CANTINIT, FAIL, "unable to allocate memory for ID list")
- if(H5F_get_obj_ids(f, H5F_OBJ_DATASET, -1, id_list)<0)
+ if(H5F_get_obj_ids(f, H5F_OBJ_DATASET, -1, id_list) != num_dsets)
HGOTO_ERROR(H5E_FILE, H5E_CANTINIT, FAIL, "unable to get dataset ID list")
- for(j=0; j<num_dsets; j++) {
- if(NULL==(dataset=H5I_object_verify(id_list[j], H5I_DATASET)))
+ for(u = 0; u < num_dsets; u++) {
+ if(NULL==(dataset=H5I_object_verify(id_list[u], H5I_DATASET)))
HGOTO_ERROR(H5E_FILE, H5E_CANTINIT, FAIL, "unable to get dataset object")
/* Flush the raw data buffer, if we have a dirty one */