path: root/src/H5Dchunk.c
author    Dana Robinson <derobins@hdfgroup.org>    2020-08-01 16:18:18 (GMT)
committer Dana Robinson <derobins@hdfgroup.org>    2020-08-01 16:18:18 (GMT)
commit    1ae6daaa807f2bfd63076215950d9c412bfa08d7 (patch)
tree      d60f5aa4158d275f953561b47089add1ea76ecd2 /src/H5Dchunk.c
parent    0a64f3e8106a1ced78ae7c4a6744b3509d7ca0fc (diff)
Sync with develop
Diffstat (limited to 'src/H5Dchunk.c')
-rw-r--r--  src/H5Dchunk.c  45
1 file changed, 23 insertions(+), 22 deletions(-)
diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c
index f6fa91a..ee83564 100644
--- a/src/H5Dchunk.c
+++ b/src/H5Dchunk.c
@@ -216,6 +216,7 @@ typedef struct H5D_chunk_readvv_ud_t {
const H5D_t *dset; /* Dataset to operate on */
} H5D_chunk_readvv_ud_t;
+/* Typedef for chunk info iterator callback */
typedef struct H5D_chunk_info_iter_ud_t {
hsize_t scaled[H5O_LAYOUT_NDIMS]; /* Logical offset of the chunk */
hsize_t ndims; /* Number of dimensions in the dataset */
@@ -433,7 +434,7 @@ H5D__chunk_direct_write(const H5D_t *dset, uint32_t filters, hsize_t *offset,
/* Allocate storage */
if(H5D__alloc_storage(&io_info, H5D_ALLOC_WRITE, FALSE, NULL) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize storage")
- } /* end if */
+ }
/* Calculate the index of this chunk */
H5VM_chunk_scaled(dset->shared->ndims, offset, layout->u.chunk.dim, scaled);
@@ -1913,7 +1914,7 @@ H5D__create_chunk_file_map_hyper(H5D_chunk_map_t *fm, const H5D_io_info_t
new_chunk_info->mspace_shared = FALSE;
/* Copy the chunk's scaled coordinates */
- H5MM_memcpy(new_chunk_info->scaled, scaled, sizeof(hsize_t) * fm->f_ndims);
+ H5MM_memcpy(new_chunk_info->scaled, scaled, sizeof(hsize_t) * fm->f_ndims);
new_chunk_info->scaled[fm->f_ndims] = 0;
/* Insert the new chunk into the skip list */
@@ -2094,7 +2095,7 @@ H5D__create_chunk_mem_map_hyper(const H5D_chunk_map_t *fm)
for(u = 0; u < fm->f_ndims; u++) {
/* Compensate for the chunk offset */
H5_CHECK_OVERFLOW(coords[u], hsize_t, hssize_t);
- chunk_adjust[u] = adjust[u] - (hssize_t)coords[u]; /*lint !e771 The adjust array will always be initialized */
+ chunk_adjust[u] = adjust[u] - (hssize_t)coords[u];
} /* end for */
/* Adjust the selection */
@@ -2328,7 +2329,7 @@ H5D__chunk_mem_cb(void H5_ATTR_UNUSED *elem, const H5T_t H5_ATTR_UNUSED *type, u
{
H5D_chunk_map_t *fm = (H5D_chunk_map_t *)_fm; /* File<->memory chunk mapping info */
H5D_chunk_info_t *chunk_info; /* Chunk information for current chunk */
- hsize_t coords_in_mem[H5S_MAX_RANK]; /* Coordinates of element in memory */
+ hsize_t coords_in_mem[H5S_MAX_RANK]; /* Coordinates of element in memory */
hsize_t chunk_index; /* Chunk index */
herr_t ret_value = SUCCEED; /* Return value */
@@ -3146,8 +3147,8 @@ done:
/*-------------------------------------------------------------------------
* Function: H5D__chunk_hash_val
*
- * Purpose: To calculate an index based on the dataset's scaled
- * coordinates and sizes of the faster dimensions.
+ * Purpose: To calculate an index based on the dataset's scaled
+ * coordinates and sizes of the faster dimensions.
*
* Return: Hash value index
*
@@ -4852,7 +4853,7 @@ H5D__chunk_update_old_edge_chunks(H5D_t *dset, hsize_t old_dim[])
/* Check if the dataspace expanded enough to cause the old edge chunks
* in this dimension to become full */
- if((space_dim[op_dim]/chunk_dim[op_dim]) >= (old_edge_chunk_sc[op_dim] + 1))
+ if((space_dim[op_dim]/chunk_dim[op_dim]) >= (old_edge_chunk_sc[op_dim] + 1))
new_full_dim[op_dim] = TRUE;
} /* end for */
@@ -5990,7 +5991,7 @@ H5D__chunk_copy_cb(const H5D_chunk_rec_t *chunk_rec, void *_udata)
if((udata->common.layout->flags & H5O_LAYOUT_CHUNK_DONT_FILTER_PARTIAL_BOUND_CHUNKS) &&
H5D__chunk_is_partial_edge_chunk(udata->dset_ndims, udata->common.layout->dim, chunk_rec->scaled, udata->dset_dims))
must_filter = FALSE;
- } /* end if */
+ }
/* Check parameter for type conversion */
if(udata->do_convert) {
@@ -6028,7 +6029,7 @@ H5D__chunk_copy_cb(const H5D_chunk_rec_t *chunk_rec, void *_udata)
HDassert(!H5F_addr_defined(chunk_rec->chunk_addr));
H5MM_memcpy(buf, udata->chunk, nbytes);
udata->chunk = NULL;
- } /* end if */
+ }
else {
H5D_rdcc_ent_t *ent = NULL; /* Cache entry */
unsigned idx; /* Index of chunk in cache, if present */
@@ -6061,13 +6062,13 @@ H5D__chunk_copy_cb(const H5D_chunk_rec_t *chunk_rec, void *_udata)
H5_CHECKED_ASSIGN(nbytes, size_t, shared_fo->layout.u.chunk.size, uint32_t);
H5MM_memcpy(buf, ent->chunk, nbytes);
- } /* end if */
+ }
else {
/* read chunk data from the source file */
if(H5F_block_read(udata->file_src, H5FD_MEM_DRAW, chunk_rec->chunk_addr, nbytes, buf) < 0)
HGOTO_ERROR(H5E_IO, H5E_READERROR, H5_ITER_ERROR, "unable to read raw data chunk")
- } /* end else */
- } /* end else */
+ }
+ }
/* Need to uncompress filtered variable-length & reference data elements that are not found in chunk cache */
if(must_filter && (is_vlen || fix_ref) && !udata->chunk_in_cache) {
@@ -7200,10 +7201,10 @@ H5D__get_num_chunks(const H5D_t *dset, const H5S_t H5_ATTR_UNUSED *space, hsize_
if((dset->shared->layout.storage.u.chunk.ops->iterate)(&idx_info, H5D__get_num_chunks_cb, &num_chunks) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to retrieve allocated chunk information from index")
*nchunks = num_chunks;
- } /* end else */
+ }
done:
- FUNC_LEAVE_NOAPI(ret_value)
+ FUNC_LEAVE_NOAPI_TAG(ret_value)
} /* end H5D__get_num_chunks() */
@@ -7247,7 +7248,7 @@ H5D__get_chunk_info_cb(const H5D_chunk_rec_t *chunk_rec, void *_udata)
/* Stop iterating */
ret_value = H5_ITER_STOP;
- } /* end if */
+ }
/* Go to the next chunk */
else
chunk_info->curr_idx++;
@@ -7306,9 +7307,9 @@ H5D__get_chunk_info(const H5D_t *dset, const H5S_t H5_ATTR_UNUSED *space, hsize_
idx_info.storage = &dset->shared->layout.storage.u.chunk;
/* Set addr & size for when dset is not written or queried chunk is not found */
- if(addr)
+ if (addr)
*addr = HADDR_UNDEF;
- if(size)
+ if (size)
*size = 0;
/* If the chunk is written, get its info, otherwise, return without error */
@@ -7341,7 +7342,7 @@ H5D__get_chunk_info(const H5D_t *dset, const H5S_t H5_ATTR_UNUSED *space, hsize_
} /* end if H5F_addr_defined */
done:
- FUNC_LEAVE_NOAPI(ret_value)
+ FUNC_LEAVE_NOAPI_TAG(ret_value)
} /* end H5D__get_chunk_info() */
@@ -7374,12 +7375,12 @@ H5D__get_chunk_info_by_coord_cb(const H5D_chunk_rec_t *chunk_rec, void *_udata)
HDassert(chunk_info);
/* Going through the scaled, stop when a mismatch is found */
- for(ii = 0; ii < chunk_info->ndims && !different; ii++)
- if(chunk_info->scaled[ii] != chunk_rec->scaled[ii])
+ for (ii = 0; ii < chunk_info->ndims && !different; ii++)
+ if (chunk_info->scaled[ii] != chunk_rec->scaled[ii])
different = TRUE;
/* Same scaled coords means the chunk is found, copy the chunk info */
- if(!different) {
+ if (!different) {
chunk_info->nbytes = chunk_rec->nbytes;
chunk_info->filter_mask = chunk_rec->filter_mask;
chunk_info->chunk_addr = chunk_rec->chunk_addr;
@@ -7387,7 +7388,7 @@ H5D__get_chunk_info_by_coord_cb(const H5D_chunk_rec_t *chunk_rec, void *_udata)
/* Stop iterating */
ret_value = H5_ITER_STOP;
- } /* end if */
+ }
FUNC_LEAVE_NOAPI(ret_value)
} /* H5D__get_chunk_info_by_coord_cb() */
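
For context on the last hunks: H5D__get_num_chunks() and H5D__get_chunk_info() are the internal routines behind the public chunk-query API (H5Dget_num_chunks(), H5Dget_chunk_info()). Below is a minimal usage sketch of that public API, not part of this commit; the file name "example.h5" and dataset path "/dset" are hypothetical placeholders.

/* Illustrative sketch only -- assumes "example.h5" holds a chunked dataset "/dset" */
#include <stdio.h>
#include "hdf5.h"

int
main(void)
{
    hid_t   file_id, dset_id, fspace_id;
    hsize_t nchunks = 0;

    file_id   = H5Fopen("example.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    dset_id   = H5Dopen2(file_id, "/dset", H5P_DEFAULT);
    fspace_id = H5Dget_space(dset_id);

    /* Number of chunks with allocated storage (backed by H5D__get_num_chunks) */
    if(H5Dget_num_chunks(dset_id, fspace_id, &nchunks) < 0)
        fprintf(stderr, "H5Dget_num_chunks failed\n");

    /* Per-chunk info by index (backed by H5D__get_chunk_info) */
    for(hsize_t i = 0; i < nchunks; i++) {
        hsize_t  offset[H5S_MAX_RANK];
        unsigned filter_mask = 0;
        haddr_t  addr        = HADDR_UNDEF;
        hsize_t  size        = 0;

        if(H5Dget_chunk_info(dset_id, fspace_id, i, offset, &filter_mask, &addr, &size) < 0)
            continue;
        printf("chunk %llu: addr=%llu size=%llu filter_mask=0x%x\n",
               (unsigned long long)i, (unsigned long long)addr,
               (unsigned long long)size, filter_mask);
    }

    H5Sclose(fspace_id);
    H5Dclose(dset_id);
    H5Fclose(file_id);
    return 0;
}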