author      Quincey Koziol <koziol@hdfgroup.org>    2010-04-23 18:11:37 (GMT)
committer   Quincey Koziol <koziol@hdfgroup.org>    2010-04-23 18:11:37 (GMT)
commit      55f2a52191fcbf38d67b698f90e2c876f1b39d02 (patch)
tree        9d1d85c6480a548c5ae2ff1046f3a57fd1222352 /src
parent      40c757d48b5fcdf0c217ae6392fb111b1a4acc91 (diff)
[svn-r18622] Description:
Bring r18621 from trunk to 1.8 branch:
Clean up compiler warnings.
Tested on:
Mac OS X/32 10.6.3 (amazon) w/debug
(too minor to require h5committest)
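Most of the changes below silence implicit-conversion warnings (e.g. gcc's -Wconversion/-Wsign-conversion): C integer promotion performs the arithmetic in int or unsigned, so assigning the result back into a narrower type such as uint16_t draws a diagnostic unless the narrowing is made explicit. A minimal illustration of the pattern (the function name below is made up for the example, not taken from the commit):

    #include <stdint.h>

    /* Arithmetic on uint16_t operands is promoted to 'int', so storing the
     * result back into a uint16_t is an implicit narrowing conversion.
     * An explicit cast, as used throughout H5B2int.c below, keeps the
     * behavior identical but tells the compiler the narrowing is intended. */
    static uint16_t
    average_nrec(uint16_t left_nrec, uint16_t right_nrec)
    {
        /* would warn with -Wconversion:
         *     uint16_t avg = (left_nrec + right_nrec) / 2;            */
        uint16_t avg = (uint16_t)((left_nrec + right_nrec) / 2);

        return avg;
    }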
Diffstat (limited to 'src')
-rw-r--r--   src/H5B.c            2
-rw-r--r--   src/H5B2int.c      110
-rw-r--r--   src/H5B2pkg.h        2
-rw-r--r--   src/H5C.c           14
-rw-r--r--   src/H5Dchunk.c       2
-rw-r--r--   src/H5overflow.h   267
-rw-r--r--   src/H5overflow.txt   1
7 files changed, 336 insertions, 62 deletions
diff --git a/src/H5B.c b/src/H5B.c
@@ -1718,7 +1718,7 @@ H5B_shared_new(const H5F_t *f, const H5B_class_t *type, size_t sizeof_rkey)
-    shared->sizeof_rnode = (H5B_SIZEOF_HDR(f) +    /*node header   */
+    shared->sizeof_rnode = ((size_t)H5B_SIZEOF_HDR(f) +    /*node header   */
diff --git a/src/H5B2int.c b/src/H5B2int.c
index 9b9c75d..f7f8995 100644
--- a/src/H5B2int.c
+++ b/src/H5B2int.c
@@ -271,14 +271,14 @@ H5B2_split1(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth, H5B2_node_ptr_t *cur
-            sizeof(H5B2_node_ptr_t) * (old_node_nrec - mid_record));
+            sizeof(H5B2_node_ptr_t) * (size_t)(old_node_nrec - mid_record));
-    internal->node_ptrs[idx + 1].node_nrec = *right_nrec = old_node_nrec - (mid_record + (unsigned)1);
+    internal->node_ptrs[idx + 1].node_nrec = *right_nrec = (uint16_t)(old_node_nrec - (mid_record + 1));
@@ -516,15 +516,15 @@ H5B2_redistribute2(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-        uint16_t new_right_nrec = (*left_nrec + *right_nrec) / 2; /* New number of records for right child */
-        uint16_t move_nrec = *right_nrec - new_right_nrec;      /* Number of records to move from right node to left */
+        uint16_t new_right_nrec = (uint16_t)(*left_nrec + *right_nrec) / 2; /* New number of records for right child */
+        uint16_t move_nrec = (uint16_t)(*right_nrec - new_right_nrec);      /* Number of records to move from right node to left */
-            HDmemcpy(H5B2_NAT_NREC(left_native, hdr, (*left_nrec + 1)), H5B2_NAT_NREC(right_native, hdr, 0), hdr->cls->nrec_size * (move_nrec - 1));
+            HDmemcpy(H5B2_NAT_NREC(left_native, hdr, (*left_nrec + 1)), H5B2_NAT_NREC(right_native, hdr, 0), hdr->cls->nrec_size * (size_t)(move_nrec - 1));
@@ -551,14 +551,14 @@ H5B2_redistribute2(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-        *left_nrec += move_nrec;
+        *left_nrec = (uint16_t)(*left_nrec + move_nrec);
-        uint16_t new_left_nrec = (*left_nrec + *right_nrec) / 2;  /* New number of records for left child */
-        uint16_t move_nrec = *left_nrec - new_left_nrec;        /* Number of records to move from left node to right */
+        uint16_t new_left_nrec = (uint16_t)(*left_nrec + *right_nrec) / 2;  /* New number of records for left child */
+        uint16_t move_nrec = (uint16_t)(*left_nrec - new_left_nrec);        /* Number of records to move from left node to right */
@@ -570,7 +570,7 @@ H5B2_redistribute2(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-            HDmemcpy(H5B2_NAT_NREC(right_native, hdr, 0), H5B2_NAT_NREC(left_native, hdr, ((*left_nrec - move_nrec) + 1)), hdr->cls->nrec_size * (move_nrec - 1));
+            HDmemcpy(H5B2_NAT_NREC(right_native, hdr, 0), H5B2_NAT_NREC(left_native, hdr, ((*left_nrec - move_nrec) + 1)), hdr->cls->nrec_size * (size_t)(move_nrec - 1));
@@ -581,7 +581,7 @@ H5B2_redistribute2(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-            HDmemmove(&(right_node_ptrs[move_nrec]), &(right_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (*right_nrec + 1));
+            HDmemmove(&(right_node_ptrs[move_nrec]), &(right_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (size_t)(*right_nrec + 1));
@@ -595,7 +595,7 @@ H5B2_redistribute2(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-        *right_nrec += move_nrec;
+        *right_nrec = (uint16_t)(*right_nrec + move_nrec);
@@ -604,8 +604,8 @@ H5B2_redistribute2(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-        internal->node_ptrs[idx].all_nrec += left_moved_nrec;
-        internal->node_ptrs[idx + 1].all_nrec += right_moved_nrec;
+        internal->node_ptrs[idx].all_nrec = (hsize_t)((hssize_t)internal->node_ptrs[idx].all_nrec + left_moved_nrec);
+        internal->node_ptrs[idx + 1].all_nrec = (hsize_t)((hssize_t)internal->node_ptrs[idx + 1].all_nrec + right_moved_nrec);
@@ -743,10 +743,10 @@ H5B2_redistribute3(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-        unsigned total_nrec = *left_nrec + *middle_nrec + *right_nrec + 2;
-        uint16_t new_middle_nrec = (total_nrec - 2) / 3;
-        uint16_t new_left_nrec = ((total_nrec - 2) - new_middle_nrec) / 2;
-        uint16_t new_right_nrec = (total_nrec - 2) - (new_left_nrec + new_middle_nrec);
+        unsigned total_nrec = (unsigned)(*left_nrec + *middle_nrec + *right_nrec + 2);
+        uint16_t new_middle_nrec = (uint16_t)(total_nrec - 2) / 3;
+        uint16_t new_left_nrec = (uint16_t)((total_nrec - 2) - new_middle_nrec) / 2;
+        uint16_t new_right_nrec = (uint16_t)((total_nrec - 2) - (unsigned)(new_left_nrec + new_middle_nrec));
@@ -762,7 +762,7 @@ H5B2_redistribute3(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-            moved_middle_nrec = new_left_nrec - (*left_nrec + 1);
+            moved_middle_nrec = (uint16_t)(new_left_nrec - (*left_nrec + 1));
@@ -771,7 +771,7 @@ H5B2_redistribute3(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-        HDmemmove(H5B2_NAT_NREC(middle_native, hdr, 0), H5B2_NAT_NREC(middle_native, hdr, moved_middle_nrec), hdr->cls->nrec_size * (*middle_nrec - moved_middle_nrec));
+        HDmemmove(H5B2_NAT_NREC(middle_native, hdr, 0), H5B2_NAT_NREC(middle_native, hdr, moved_middle_nrec), hdr->cls->nrec_size * (size_t)(*middle_nrec - moved_middle_nrec));
@@ -780,26 +780,26 @@ H5B2_redistribute3(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-            move_nptrs = new_left_nrec - *left_nrec;
+            move_nptrs = (unsigned)(new_left_nrec - *left_nrec);
-            left_moved_nrec = moved_nrec + move_nptrs;
-            middle_moved_nrec -= moved_nrec + move_nptrs;
+            left_moved_nrec = (hssize_t)(moved_nrec + move_nptrs);
+            middle_moved_nrec -= (hssize_t)(moved_nrec + move_nptrs);
-        curr_middle_nrec -= moved_middle_nrec;
+        curr_middle_nrec = (uint16_t)(curr_middle_nrec - moved_middle_nrec);
-        unsigned right_nrec_move = new_right_nrec - *right_nrec; /* Number of records to move out of right node */
+        unsigned right_nrec_move = (unsigned)(new_right_nrec - *right_nrec); /* Number of records to move out of right node */
@@ -820,7 +820,7 @@ H5B2_redistribute3(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-            HDmemmove(&(right_node_ptrs[right_nrec_move]), &(right_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (*right_nrec + 1));
+            HDmemmove(&(right_node_ptrs[right_nrec_move]), &(right_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (size_t)(*right_nrec + 1));
@@ -828,17 +828,17 @@ H5B2_redistribute3(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-            right_moved_nrec = moved_nrec + right_nrec_move;
-            middle_moved_nrec -= moved_nrec + right_nrec_move;
+            right_moved_nrec = (hssize_t)(moved_nrec + right_nrec_move);
+            middle_moved_nrec -= (hssize_t)(moved_nrec + right_nrec_move);
-        curr_middle_nrec -= right_nrec_move;
+        curr_middle_nrec = (uint16_t)(curr_middle_nrec - right_nrec_move);
-        unsigned left_nrec_move = *left_nrec - new_left_nrec; /* Number of records to move out of left node */
+        unsigned left_nrec_move = (unsigned)(*left_nrec - new_left_nrec); /* Number of records to move out of left node */
@@ -859,7 +859,7 @@ H5B2_redistribute3(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-            HDmemmove(&(middle_node_ptrs[left_nrec_move]), &(middle_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (curr_middle_nrec + 1));
+            HDmemmove(&(middle_node_ptrs[left_nrec_move]), &(middle_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (size_t)(curr_middle_nrec + 1));
@@ -867,17 +867,17 @@ H5B2_redistribute3(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-            left_moved_nrec -= moved_nrec + left_nrec_move;
-            middle_moved_nrec += moved_nrec + left_nrec_move;
+            left_moved_nrec -= (hssize_t)(moved_nrec + left_nrec_move);
+            middle_moved_nrec += (hssize_t)(moved_nrec + left_nrec_move);
-        curr_middle_nrec += left_nrec_move;
+        curr_middle_nrec = (uint16_t)(curr_middle_nrec + left_nrec_move);
-        unsigned right_nrec_move = *right_nrec - new_right_nrec; /* Number of records to move out of right node */
+        unsigned right_nrec_move = (unsigned)(*right_nrec - new_right_nrec); /* Number of records to move out of right node */
@@ -902,11 +902,11 @@ H5B2_redistribute3(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-            right_moved_nrec -= moved_nrec + right_nrec_move;
-            middle_moved_nrec += moved_nrec + right_nrec_move;
+            right_moved_nrec -= (hssize_t)(moved_nrec + right_nrec_move);
+            middle_moved_nrec += (hssize_t)(moved_nrec + right_nrec_move);
-            HDmemmove(&(right_node_ptrs[0]), &(right_node_ptrs[right_nrec_move]), sizeof(H5B2_node_ptr_t) * (new_right_nrec + 1));
+            HDmemmove(&(right_node_ptrs[0]), &(right_node_ptrs[right_nrec_move]), sizeof(H5B2_node_ptr_t) * (size_t)(new_right_nrec + 1));
@@ -923,9 +923,9 @@ H5B2_redistribute3(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-        internal->node_ptrs[idx - 1].all_nrec += left_moved_nrec;
-        internal->node_ptrs[idx].all_nrec += middle_moved_nrec;
-        internal->node_ptrs[idx + 1].all_nrec += right_moved_nrec;
+        internal->node_ptrs[idx - 1].all_nrec = (hsize_t)((hssize_t)internal->node_ptrs[idx - 1].all_nrec + left_moved_nrec);
+        internal->node_ptrs[idx].all_nrec = (hsize_t)((hssize_t)internal->node_ptrs[idx].all_nrec + middle_moved_nrec);
+        internal->node_ptrs[idx + 1].all_nrec = (hsize_t)((hssize_t)internal->node_ptrs[idx + 1].all_nrec + right_moved_nrec);
@@ -1100,10 +1100,10 @@ H5B2_merge2(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-        HDmemcpy(&(left_node_ptrs[*left_nrec + 1]), &(right_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (*right_nrec + 1));
+        HDmemcpy(&(left_node_ptrs[*left_nrec + 1]), &(right_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (size_t)(*right_nrec + 1));
-    *left_nrec += *right_nrec + 1;
+    *left_nrec = (uint16_t)(*left_nrec + *right_nrec + 1);
@@ -1261,7 +1261,7 @@ H5B2_merge3(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-        unsigned total_nrec = *left_nrec + *middle_nrec + *right_nrec + 2;
+        unsigned total_nrec = (unsigned)(*left_nrec + *middle_nrec + *right_nrec + 2);
@@ -1291,12 +1291,12 @@ H5B2_merge3(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-            HDmemmove(&(middle_node_ptrs[0]), &(middle_node_ptrs[middle_nrec_move]), sizeof(H5B2_node_ptr_t) * ((*middle_nrec + 1) - middle_nrec_move));
+            HDmemmove(&(middle_node_ptrs[0]), &(middle_node_ptrs[middle_nrec_move]), sizeof(H5B2_node_ptr_t) * (size_t)((unsigned)(*middle_nrec + 1) - middle_nrec_move));
-        *left_nrec += middle_nrec_move;
-        *middle_nrec -= middle_nrec_move;
+        *left_nrec = (uint16_t)(*left_nrec + middle_nrec_move);
+        *middle_nrec = (uint16_t)(*middle_nrec - middle_nrec_move);
@@ -1310,10 +1310,10 @@ H5B2_merge3(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-            HDmemcpy(&(middle_node_ptrs[*middle_nrec + 1]), &(right_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (*right_nrec + 1));
+            HDmemcpy(&(middle_node_ptrs[*middle_nrec + 1]), &(right_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (size_t)(*right_nrec + 1));
-    *middle_nrec += *right_nrec + 1;
+    *middle_nrec = (uint16_t)(*middle_nrec + (*right_nrec + 1));
@@ -1796,7 +1796,7 @@ HDmemset(internal->node_ptrs, 0, sizeof(H5B2_node_ptr_t) * (hdr->node_info[depth
-    internal->depth = depth;
+    internal->depth = (uint16_t)depth;
@@ -1846,8 +1846,8 @@ H5B2_protect_internal(H5B2_hdr_t *hdr, hid_t dxpl_id, haddr_t addr,
-    udata.nrec = nrec;
-    udata.depth = depth;
+    H5_ASSIGN_OVERFLOW(/* To: */ udata.nrec, /* From: */ nrec, /* From: */ unsigned, /* To: */ uint16_t)
+    H5_ASSIGN_OVERFLOW(/* To: */ udata.depth, /* From: */ depth, /* From: */ unsigned, /* To: */ uint16_t)
@@ -1912,7 +1912,7 @@ H5B2_iterate_node(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-        HDmemcpy(node_ptrs, internal->node_ptrs, (sizeof(H5B2_node_ptr_t) * (curr_node->node_nrec + 1)));
+        HDmemcpy(node_ptrs, internal->node_ptrs, (sizeof(H5B2_node_ptr_t) * (size_t)(curr_node->node_nrec + 1)));
@@ -2800,7 +2800,7 @@ H5B2_delete_node(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-        for(u = 0; u < internal->nrec + 1; u++)
+        for(u = 0; u < internal->nrec + (unsigned)1; u++)
@@ -2875,12 +2875,12 @@ H5B2_node_size(H5B2_hdr_t *hdr, hid_t dxpl_id, unsigned depth,
-        for(u = 0; u < internal->nrec + 1; u++)
+        for(u = 0; u < internal->nrec + (unsigned)1; u++)
-        *btree_size += (internal->nrec + 1) * hdr->node_size;
+        *btree_size += (hsize_t)(internal->nrec + 1) * hdr->node_size;
diff --git a/src/H5B2pkg.h b/src/H5B2pkg.h
index dd8f67b..f2f4e61 100644
--- a/src/H5B2pkg.h
+++ b/src/H5B2pkg.h
@@ -53,7 +53,7 @@
-    (h)->sizeof_addr           /* Address of child node */ \
+    (unsigned)(h)->sizeof_addr /* Address of child node */ \
diff --git a/src/H5C.c b/src/H5C.c
@@ -157,7 +157,7 @@
-static herr_t H5C_flush_single_entry(const H5F_t * f,
+static herr_t H5C_flush_single_entry(H5F_t * f,
@@ -166,7 +166,7 @@ static herr_t H5C_flush_single_entry(const H5F_t * f,
-static herr_t H5C_flush_invalidate_cache(const H5F_t * f,
+static herr_t H5C_flush_invalidate_cache(H5F_t * f,
@@ -741,6 +741,12 @@ H5C_def_auto_resize_rpt_fcn(H5C_t * cache_ptr,
+            case H5C_decr__off:
+                HDfprintf(stdout,
+                          "%sAuto cache resize -- decrease off.  HR = %lf\n",
+                          cache_ptr->prefix, hit_rate);
+                break;
+
@@ -7109,7 +7115,7 @@ done:
-H5C_flush_invalidate_cache(const H5F_t * f,
+H5C_flush_invalidate_cache(H5F_t * f,
@@ -7693,7 +7699,7 @@ done:
-H5C_flush_single_entry(const H5F_t * f,
+H5C_flush_single_entry(H5F_t * f,
diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c
index ce7deb5..aa7b8dc 100644
--- a/src/H5Dchunk.c
+++ b/src/H5Dchunk.c
@@ -405,7 +405,7 @@ H5D_chunk_construct(H5F_t UNUSED *f, H5D_t *dset)
-    dset->shared->layout.u.chunk.dim[dset->shared->layout.u.chunk.ndims - 1] = H5T_GET_SIZE(type);
+    dset->shared->layout.u.chunk.dim[dset->shared->layout.u.chunk.ndims - 1] = (uint32_t)H5T_GET_SIZE(type);
diff --git a/src/H5overflow.h b/src/H5overflow.h
index 1c00acf..b3b343e 100644
--- a/src/H5overflow.h
+++ b/src/H5overflow.h
@@ -51,6 +51,18 @@
+/* src: unsigned, dst: uint16_t */
+#if H5_SIZEOF_UNSIGNED < H5_SIZEOF_UINT16_T
+    #define ASSIGN_unsigned_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UNSIGNED > H5_SIZEOF_UINT16_T
+    #define ASSIGN_unsigned_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UNSIGNED == H5_SIZEOF_UINT16_T */
+    #define ASSIGN_unsigned_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: unsigned dst: uint16_t */
+
@@ -174,6 +186,18 @@
+/* src: int, dst: uint16_t */
+#if H5_SIZEOF_INT < H5_SIZEOF_UINT16_T
+    #define ASSIGN_int_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SIGNED_TO_UNSIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_INT > H5_SIZEOF_UINT16_T
+    #define ASSIGN_int_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_INT == H5_SIZEOF_UINT16_T */
+    #define ASSIGN_int_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SIGNED_TO_UNSIGNED(dst, dsttype, src, srctype)
+#endif /* src: int dst: uint16_t */
+
@@ -297,6 +321,18 @@
+/* src: uint8_t, dst: uint16_t */
+#if H5_SIZEOF_UINT8_T < H5_SIZEOF_UINT16_T
+    #define ASSIGN_uint8_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UINT8_T > H5_SIZEOF_UINT16_T
+    #define ASSIGN_uint8_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UINT8_T == H5_SIZEOF_UINT16_T */
+    #define ASSIGN_uint8_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: uint8_t dst: uint16_t */
+
@@ -394,6 +430,141 @@
+/* Assignment checks for uint16_t */
+
+/* src: uint16_t, dst: unsigned */
+#if H5_SIZEOF_UINT16_T < H5_SIZEOF_UNSIGNED
+    #define ASSIGN_uint16_t_TO_unsigned(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UINT16_T > H5_SIZEOF_UNSIGNED
+    #define ASSIGN_uint16_t_TO_unsigned(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UINT16_T == H5_SIZEOF_UNSIGNED */
+    #define ASSIGN_uint16_t_TO_unsigned(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: uint16_t dst: unsigned */
+
+/* src: uint16_t, dst: int */
+#if H5_SIZEOF_UINT16_T < H5_SIZEOF_INT
+    #define ASSIGN_uint16_t_TO_int(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_UNSIGNED_TO_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UINT16_T > H5_SIZEOF_INT
+    #define ASSIGN_uint16_t_TO_int(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UINT16_T == H5_SIZEOF_INT */
+    #define ASSIGN_uint16_t_TO_int(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_UNSIGNED_TO_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: uint16_t dst: int */
+
+/* src: uint16_t, dst: uint8_t */
+#if H5_SIZEOF_UINT16_T < H5_SIZEOF_UINT8_T
+    #define ASSIGN_uint16_t_TO_uint8_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UINT16_T > H5_SIZEOF_UINT8_T
+    #define ASSIGN_uint16_t_TO_uint8_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UINT16_T == H5_SIZEOF_UINT8_T */
+    #define ASSIGN_uint16_t_TO_uint8_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: uint16_t dst: uint8_t */
+
+/* src: uint16_t, dst: uint32_t */
+#if H5_SIZEOF_UINT16_T < H5_SIZEOF_UINT32_T
+    #define ASSIGN_uint16_t_TO_uint32_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UINT16_T > H5_SIZEOF_UINT32_T
+    #define ASSIGN_uint16_t_TO_uint32_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UINT16_T == H5_SIZEOF_UINT32_T */
+    #define ASSIGN_uint16_t_TO_uint32_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: uint16_t dst: uint32_t */
+
+/* src: uint16_t, dst: uint64_t */
+#if H5_SIZEOF_UINT16_T < H5_SIZEOF_UINT64_T
+    #define ASSIGN_uint16_t_TO_uint64_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UINT16_T > H5_SIZEOF_UINT64_T
+    #define ASSIGN_uint16_t_TO_uint64_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UINT16_T == H5_SIZEOF_UINT64_T */
+    #define ASSIGN_uint16_t_TO_uint64_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: uint16_t dst: uint64_t */
+
+/* src: uint16_t, dst: size_t */
+#if H5_SIZEOF_UINT16_T < H5_SIZEOF_SIZE_T
+    #define ASSIGN_uint16_t_TO_size_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UINT16_T > H5_SIZEOF_SIZE_T
+    #define ASSIGN_uint16_t_TO_size_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UINT16_T == H5_SIZEOF_SIZE_T */
+    #define ASSIGN_uint16_t_TO_size_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: uint16_t dst: size_t */
+
+/* src: uint16_t, dst: ssize_t */
+#if H5_SIZEOF_UINT16_T < H5_SIZEOF_SSIZE_T
+    #define ASSIGN_uint16_t_TO_ssize_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_UNSIGNED_TO_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UINT16_T > H5_SIZEOF_SSIZE_T
+    #define ASSIGN_uint16_t_TO_ssize_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UINT16_T == H5_SIZEOF_SSIZE_T */
+    #define ASSIGN_uint16_t_TO_ssize_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_UNSIGNED_TO_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: uint16_t dst: ssize_t */
+
+/* src: uint16_t, dst: haddr_t */
+#if H5_SIZEOF_UINT16_T < H5_SIZEOF_HADDR_T
+    #define ASSIGN_uint16_t_TO_haddr_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UINT16_T > H5_SIZEOF_HADDR_T
+    #define ASSIGN_uint16_t_TO_haddr_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UINT16_T == H5_SIZEOF_HADDR_T */
+    #define ASSIGN_uint16_t_TO_haddr_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: uint16_t dst: haddr_t */
+
+/* src: uint16_t, dst: hsize_t */
+#if H5_SIZEOF_UINT16_T < H5_SIZEOF_HSIZE_T
+    #define ASSIGN_uint16_t_TO_hsize_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UINT16_T > H5_SIZEOF_HSIZE_T
+    #define ASSIGN_uint16_t_TO_hsize_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UINT16_T == H5_SIZEOF_HSIZE_T */
+    #define ASSIGN_uint16_t_TO_hsize_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: uint16_t dst: hsize_t */
+
+/* src: uint16_t, dst: hssize_t */
+#if H5_SIZEOF_UINT16_T < H5_SIZEOF_HSSIZE_T
+    #define ASSIGN_uint16_t_TO_hssize_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_UNSIGNED_TO_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UINT16_T > H5_SIZEOF_HSSIZE_T
+    #define ASSIGN_uint16_t_TO_hssize_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UINT16_T == H5_SIZEOF_HSSIZE_T */
+    #define ASSIGN_uint16_t_TO_hssize_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_UNSIGNED_TO_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: uint16_t dst: hssize_t */
+
+/* src: uint16_t, dst: h5_stat_size_t */
+#if H5_SIZEOF_UINT16_T < H5_SIZEOF_H5_STAT_SIZE_T
+    #define ASSIGN_uint16_t_TO_h5_stat_size_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UINT16_T > H5_SIZEOF_H5_STAT_SIZE_T
+    #define ASSIGN_uint16_t_TO_h5_stat_size_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UINT16_T == H5_SIZEOF_H5_STAT_SIZE_T */
+    #define ASSIGN_uint16_t_TO_h5_stat_size_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: uint16_t dst: h5_stat_size_t */
+
+
@@ -432,6 +603,18 @@
+/* src: uint32_t, dst: uint16_t */
+#if H5_SIZEOF_UINT32_T < H5_SIZEOF_UINT16_T
+    #define ASSIGN_uint32_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UINT32_T > H5_SIZEOF_UINT16_T
+    #define ASSIGN_uint32_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UINT32_T == H5_SIZEOF_UINT16_T */
+    #define ASSIGN_uint32_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: uint32_t dst: uint16_t */
+
@@ -555,6 +738,18 @@
+/* src: uint64_t, dst: uint16_t */
+#if H5_SIZEOF_UINT64_T < H5_SIZEOF_UINT16_T
+    #define ASSIGN_uint64_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_UINT64_T > H5_SIZEOF_UINT16_T
+    #define ASSIGN_uint64_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_UINT64_T == H5_SIZEOF_UINT16_T */
+    #define ASSIGN_uint64_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: uint64_t dst: uint16_t */
+
@@ -678,6 +873,18 @@
+/* src: size_t, dst: uint16_t */
+#if H5_SIZEOF_SIZE_T < H5_SIZEOF_UINT16_T
+    #define ASSIGN_size_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_SIZE_T > H5_SIZEOF_UINT16_T
+    #define ASSIGN_size_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_SIZE_T == H5_SIZEOF_UINT16_T */
+    #define ASSIGN_size_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: size_t dst: uint16_t */
+
@@ -801,6 +1008,18 @@
+/* src: ssize_t, dst: uint16_t */
+#if H5_SIZEOF_SSIZE_T < H5_SIZEOF_UINT16_T
+    #define ASSIGN_ssize_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SIGNED_TO_UNSIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_SSIZE_T > H5_SIZEOF_UINT16_T
+    #define ASSIGN_ssize_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_SSIZE_T == H5_SIZEOF_UINT16_T */
+    #define ASSIGN_ssize_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SIGNED_TO_UNSIGNED(dst, dsttype, src, srctype)
+#endif /* src: ssize_t dst: uint16_t */
+
@@ -924,6 +1143,18 @@
+/* src: haddr_t, dst: uint16_t */
+#if H5_SIZEOF_HADDR_T < H5_SIZEOF_UINT16_T
+    #define ASSIGN_haddr_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_HADDR_T > H5_SIZEOF_UINT16_T
+    #define ASSIGN_haddr_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_HADDR_T == H5_SIZEOF_UINT16_T */
+    #define ASSIGN_haddr_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: haddr_t dst: uint16_t */
+
@@ -1047,6 +1278,18 @@
+/* src: hsize_t, dst: uint16_t */
+#if H5_SIZEOF_HSIZE_T < H5_SIZEOF_UINT16_T
+    #define ASSIGN_hsize_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_HSIZE_T > H5_SIZEOF_UINT16_T
+    #define ASSIGN_hsize_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_HSIZE_T == H5_SIZEOF_UINT16_T */
+    #define ASSIGN_hsize_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: hsize_t dst: uint16_t */
+
@@ -1170,6 +1413,18 @@
+/* src: hssize_t, dst: uint16_t */
+#if H5_SIZEOF_HSSIZE_T < H5_SIZEOF_UINT16_T
+    #define ASSIGN_hssize_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SIGNED_TO_UNSIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_HSSIZE_T > H5_SIZEOF_UINT16_T
+    #define ASSIGN_hssize_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_HSSIZE_T == H5_SIZEOF_UINT16_T */
+    #define ASSIGN_hssize_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SIGNED_TO_UNSIGNED(dst, dsttype, src, srctype)
+#endif /* src: hssize_t dst: uint16_t */
+
@@ -1293,6 +1548,18 @@
+/* src: h5_stat_size_t, dst: uint16_t */
+#if H5_SIZEOF_H5_STAT_SIZE_T < H5_SIZEOF_UINT16_T
+    #define ASSIGN_h5_stat_size_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#elif H5_SIZEOF_H5_STAT_SIZE_T > H5_SIZEOF_UINT16_T
+    #define ASSIGN_h5_stat_size_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)
+#else /* H5_SIZEOF_H5_STAT_SIZE_T == H5_SIZEOF_UINT16_T */
+    #define ASSIGN_h5_stat_size_t_TO_uint16_t(dst, dsttype, src, srctype) \
+        ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)
+#endif /* src: h5_stat_size_t dst: uint16_t */
+
diff --git a/src/H5overflow.txt b/src/H5overflow.txt
index ea62fa0..c8c1996 100644
--- a/src/H5overflow.txt
+++ b/src/H5overflow.txt
@@ -33,6 +33,7 @@
+uint16_t, UNSIGNED;
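The H5overflow.h additions are generated from the type list in H5overflow.txt, which is why the single uint16_t line added to the .txt file accounts for the 267 generated header lines above. The new H5_ASSIGN_OVERFLOW call sites in H5B2int.c route assignments through these ASSIGN_<src>_TO_<dst> helpers so that debug builds can verify the value fits before casting, while release builds keep a plain cast. A standalone sketch of that idea follows; the macro and function names here are illustrative only, not the actual HDF5 definitions:

    #include <assert.h>
    #include <stdint.h>

    /* Illustrative analog of an overflow-checked assignment for an unsigned
     * destination type: assert the value fits, then perform the explicit cast.
     * Release-style code would reduce this to just the cast. */
    #define CHECKED_ASSIGN(dst, dsttype, src)              \
        do {                                               \
            assert((src) <= (dsttype)~(dsttype)0);         \
            (dst) = (dsttype)(src);                        \
        } while (0)

    static void
    protect_internal_demo(unsigned nrec, unsigned depth)
    {
        uint16_t nrec16, depth16;

        CHECKED_ASSIGN(nrec16, uint16_t, nrec);    /* aborts in a debug build if nrec > 65535 */
        CHECKED_ASSIGN(depth16, uint16_t, depth);
        (void)nrec16;
        (void)depth16;
    }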