author      Neil Fortner <nfortne2@hdfgroup.org>  2015-05-05 20:15:40 (GMT)
committer   Neil Fortner <nfortne2@hdfgroup.org>  2015-05-05 20:15:40 (GMT)
commit      8fe3dfbf9410ad5acabeaeb187bfb5f7b489042e (patch)
tree        393d8485ce30da4ef690f7a1f7fc20a30347d9e4 /src/H5Dlayout.c
parent      8ba06cb15e5bab3b49f210ff6d635d35fc4ed1ec (diff)
[svn-r27022] Add new functions H5Pset_virtual_dataspace_bounds and
H5Pget_virtual_dataspace_bounds. Minor refactor of dataset initialization
code to make this cleaner. Update h5_vds-percival-unlim-maxmin.c. Other
minor fixes/cleanup.

Tested: ummon
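
Note on the refactor shown in the hunk below: the per-layout-type switch in
H5D__layout_oh_read() is replaced by a dispatch through the layout's
operations table (dataset->shared->layout.ops->init). The following is a
minimal, self-contained sketch of that dispatch pattern only; the struct and
function names here are simplified stand-ins, not the actual HDF5 internals.

    /* Simplified model of the layout-ops dispatch pattern (hypothetical types,
     * not the real HDF5 structs). */
    #include <stdio.h>

    typedef struct fake_dset_t fake_dset_t;

    /* Per-layout operations table: each layout class supplies its own init hook */
    typedef struct {
        const char *name;
        int (*init)(fake_dset_t *dset);  /* may be NULL if no init work is needed */
    } layout_ops_t;

    struct fake_dset_t {
        const layout_ops_t *ops;
    };

    static int chunk_init(fake_dset_t *dset)
    {
        (void)dset;
        printf("initializing chunk cache\n");
        return 0;
    }

    static const layout_ops_t contig_ops  = { "contiguous", NULL };
    static const layout_ops_t chunked_ops = { "chunked", chunk_init };

    /* Mirrors the new code path: call ops->init if the layout provides one,
     * instead of switching on the layout type. */
    static int layout_oh_read(fake_dset_t *dset)
    {
        if (dset->ops->init && dset->ops->init(dset) < 0)
            return -1;
        return 0;
    }

    int main(void)
    {
        fake_dset_t contig  = { &contig_ops };
        fake_dset_t chunked = { &chunked_ops };

        layout_oh_read(&contig);   /* no init hook -> nothing to do */
        layout_oh_read(&chunked);  /* dispatches to chunk_init() */
        return 0;
    }

The design choice is the usual one for removing a switch: the layout-specific
work moves behind a function pointer owned by each layout class, so adding a
new layout type (here, virtual datasets) does not require touching this
routine again.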
Diffstat (limited to 'src/H5Dlayout.c')
-rw-r--r--  src/H5Dlayout.c  126
1 file changed, 48 insertions(+), 78 deletions(-)
diff --git a/src/H5Dlayout.c b/src/H5Dlayout.c
index 87e1bc8..cb8b27d 100644
--- a/src/H5Dlayout.c
+++ b/src/H5Dlayout.c
@@ -391,85 +391,55 @@ H5D__layout_oh_read(H5D_t *dataset, hid_t dxpl_id, hid_t dapl_id, H5P_genplist_t
if(H5D_CHUNKED == dataset->shared->layout.type)
dataset->shared->layout.u.chunk.ndims++;
- switch(dataset->shared->layout.type) {
- case H5D_CONTIGUOUS:
- {
- hsize_t tmp_size; /* Temporary holder for raw data size */
- size_t tmp_sieve_buf_size; /* Temporary holder for sieve buffer size */
-
- /* Compute the size of the contiguous storage for versions of the
- * layout message less than version 3 because versions 1 & 2 would
- * truncate the dimension sizes to 32-bits of information. - QAK 5/26/04
- */
- if(dataset->shared->layout.version < 3) {
- hssize_t snelmts; /* Temporary holder for number of elements in dataspace */
- hsize_t nelmts; /* Number of elements in dataspace */
- size_t dt_size; /* Size of datatype */
-
- /* Retrieve the number of elements in the dataspace */
- if((snelmts = H5S_GET_EXTENT_NPOINTS(dataset->shared->space)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to retrieve number of elements in dataspace")
- nelmts = (hsize_t)snelmts;
-
- /* Get the datatype's size */
- if(0 == (dt_size = H5T_GET_SIZE(dataset->shared->type)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to retrieve size of datatype")
-
- /* Compute the size of the dataset's contiguous storage */
- tmp_size = nelmts * dt_size;
-
- /* Check for overflow during multiplication */
- if(nelmts != (tmp_size / dt_size))
- HGOTO_ERROR(H5E_DATASET, H5E_OVERFLOW, FAIL, "size of dataset's storage overflowed")
-
- /* Assign the dataset's contiguous storage size */
- dataset->shared->layout.storage.u.contig.size = tmp_size;
- } else
- tmp_size = dataset->shared->layout.storage.u.contig.size;
-
- /* Get the sieve buffer size for the file */
- tmp_sieve_buf_size = H5F_SIEVE_BUF_SIZE(dataset->oloc.file);
-
- /* Adjust the sieve buffer size to the smaller one between the dataset size and the buffer size
- * from the file access property. (SLU - 2012/3/30) */
- if(tmp_size < tmp_sieve_buf_size)
- dataset->shared->cache.contig.sieve_buf_size = tmp_size;
- else
- dataset->shared->cache.contig.sieve_buf_size = tmp_sieve_buf_size;
- }
- break;
-
- case H5D_CHUNKED:
- /* Initialize the chunk cache for the dataset */
- if(H5D__chunk_init(dataset->oloc.file, dxpl_id, dataset, dapl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't initialize chunk cache")
- break;
-
- case H5D_COMPACT:
- break;
-
- case H5D_VIRTUAL:
- {
- size_t i;
-
- HDassert(dataset->shared->layout.storage.u.virt.list || (dataset->shared->layout.storage.u.virt.list_nused == 0));
-
- /* Patch the virtual selection dataspaces if necessary */
- for(i = 0; i < dataset->shared->layout.storage.u.virt.list_nused; i++) {
- if(dataset->shared->layout.storage.u.virt.list[i].virtual_space_status != H5O_VIRTUAL_STATUS_CORRECT) {
- if(H5S_extent_copy(dataset->shared->layout.storage.u.virt.list[i].virtual_select, dataset->shared->space) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't copy virtual dataspace extent")
- dataset->shared->layout.storage.u.virt.list[i].virtual_space_status = H5O_VIRTUAL_STATUS_CORRECT;
- } /* end if */
- } /* end for */
- } /* end block */
- break;
+ if(H5D_CONTIGUOUS == dataset->shared->layout.type) {
+ hsize_t tmp_size; /* Temporary holder for raw data size */
+ size_t tmp_sieve_buf_size; /* Temporary holder for sieve buffer size */
+
+ /* Compute the size of the contiguous storage for versions of the
+ * layout message less than version 3 because versions 1 & 2 would
+ * truncate the dimension sizes to 32-bits of information. - QAK 5/26/04
+ */
+ if(dataset->shared->layout.version < 3) {
+ hssize_t snelmts; /* Temporary holder for number of elements in dataspace */
+ hsize_t nelmts; /* Number of elements in dataspace */
+ size_t dt_size; /* Size of datatype */
+
+ /* Retrieve the number of elements in the dataspace */
+ if((snelmts = H5S_GET_EXTENT_NPOINTS(dataset->shared->space)) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to retrieve number of elements in dataspace")
+ nelmts = (hsize_t)snelmts;
+
+ /* Get the datatype's size */
+ if(0 == (dt_size = H5T_GET_SIZE(dataset->shared->type)))
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to retrieve size of datatype")
+
+ /* Compute the size of the dataset's contiguous storage */
+ tmp_size = nelmts * dt_size;
+
+ /* Check for overflow during multiplication */
+ if(nelmts != (tmp_size / dt_size))
+ HGOTO_ERROR(H5E_DATASET, H5E_OVERFLOW, FAIL, "size of dataset's storage overflowed")
+
+ /* Assign the dataset's contiguous storage size */
+ dataset->shared->layout.storage.u.contig.size = tmp_size;
+ } /* end if */
+ else
+ tmp_size = dataset->shared->layout.storage.u.contig.size;
+
+ /* Get the sieve buffer size for the file */
+ tmp_sieve_buf_size = H5F_SIEVE_BUF_SIZE(dataset->oloc.file);
+
+ /* Adjust the sieve buffer size to the smaller one between the dataset size and the buffer size
+ * from the file access property. (SLU - 2012/3/30) */
+ if(tmp_size < tmp_sieve_buf_size)
+ dataset->shared->cache.contig.sieve_buf_size = tmp_size;
+ else
+ dataset->shared->cache.contig.sieve_buf_size = tmp_sieve_buf_size;
+ } /* end if */
- case H5D_LAYOUT_ERROR:
- case H5D_NLAYOUTS:
- default:
- HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "unknown storage method")
- } /* end switch */ /*lint !e788 All appropriate cases are covered */
+ /* Initialize the layout information for the new dataset */
+ if(dataset->shared->layout.ops->init && (dataset->shared->layout.ops->init)(dataset->oloc.file, dxpl_id, dataset, dapl_id) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize layout information")
done:
FUNC_LEAVE_NOAPI(ret_value)
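
For reference, the contiguous-storage arithmetic kept by this hunk (now
hoisted out of the switch) does two things: a multiply-then-divide check to
catch overflow when computing nelmts * dt_size, and a clamp of the sieve
buffer to the smaller of the dataset size and the configured buffer size.
A standalone sketch of those two checks, with made-up values standing in for
the dataset fields:

    /* Standalone illustration of the overflow check and sieve-buffer clamp;
     * the values below are hypothetical, chosen only for the example. */
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint64_t nelmts  = 1000000;      /* number of elements in the dataspace */
        size_t   dt_size = 8;            /* size of one datatype element, in bytes */
        uint64_t sieve_buf_size = 65536; /* sieve buffer size from the file access property */

        /* Compute the raw storage size, then verify the multiplication did not
         * overflow: if it did, dividing the product by dt_size will not give
         * nelmts back. */
        uint64_t tmp_size = nelmts * dt_size;
        if (nelmts != tmp_size / dt_size) {
            fprintf(stderr, "size of dataset's storage overflowed\n");
            return 1;
        }

        /* Use the smaller of the dataset size and the configured sieve buffer size */
        uint64_t actual_sieve = (tmp_size < sieve_buf_size) ? tmp_size : sieve_buf_size;

        printf("storage size = %llu bytes, sieve buffer = %llu bytes\n",
               (unsigned long long)tmp_size, (unsigned long long)actual_sieve);
        return 0;
    }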