author      Raymond Lu <songyulu@hdfgroup.org>   2013-01-11 21:34:31 (GMT)
committer   Raymond Lu <songyulu@hdfgroup.org>   2013-01-11 21:34:31 (GMT)
commit      3a7b11863a86b07935707b2896a5fd9f7eebf9a3 (patch)
tree        57712960393292ac85cd61d5c7c09b668a0f1c6b /src/H5Dio.c
parent      7b895e1dd8c9fdba0d94bf4100a1be77ee05a64b (diff)
download    hdf5-3a7b11863a86b07935707b2896a5fd9f7eebf9a3.zip
            hdf5-3a7b11863a86b07935707b2896a5fd9f7eebf9a3.tar.gz
            hdf5-3a7b11863a86b07935707b2896a5fd9f7eebf9a3.tar.bz2
[svn-r23152] I took out the obsolete function H5PSIwrite_chunk and its related test and performance test.
Tested on koala.
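
For reference, the sketch below shows roughly how an application would have called the removed routine, reconstructed only from the signature visible in the diff further down; the helper name, the two-dimensional offset, the filter mask of zero, and the buffer arguments are illustrative placeholders, not code taken from this commit or its tests.

/* Hypothetical caller of the now-removed H5PSIdirect_write():
 * push an already-filtered chunk straight to the file, bypassing the
 * datatype-conversion and filter pipeline that H5Dwrite() would run. */
#include "hdf5.h"

static herr_t
write_raw_chunk(hid_t dset_id, const void *chunk_buf, size_t chunk_nbytes)
{
    hsize_t  offset[2] = {0, 0};   /* logical element offset; must sit on a chunk boundary */
    uint32_t filter_mask = 0;      /* 0 => all pipeline filters were applied to chunk_buf */

    /* H5P_DEFAULT is accepted and mapped to the default transfer property list */
    return H5PSIdirect_write(dset_id, H5P_DEFAULT, filter_mask,
                             offset, chunk_nbytes, chunk_buf);
}

Code that relied on this symbol can migrate to the equivalent direct chunk write that later shipped in the high-level library as H5DOwrite_chunk (and, from HDF5 1.10.3 on, in the main library as H5Dwrite_chunk); the argument list is essentially the same apart from the name.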
Diffstat (limited to 'src/H5Dio.c')
-rw-r--r--  src/H5Dio.c | 88 ----
1 file changed, 0 insertions(+), 88 deletions(-)
diff --git a/src/H5Dio.c b/src/H5Dio.c
index 095c8a5..8b6d380 100644
--- a/src/H5Dio.c
+++ b/src/H5Dio.c
@@ -372,94 +372,6 @@ done:
/*-------------------------------------------------------------------------
- * Function:    H5PSIdirect_write
- *
- * Purpose:     Temporary name for the DECTRIS project.  It writes an entire
- *              chunk to the file directly.
- *
- * Return:      Non-negative on success/Negative on failure
- *
- * Programmer:  Raymond Lu
- *              30 July 2012
- *
- *-------------------------------------------------------------------------
- */
-herr_t
-H5PSIdirect_write(hid_t dset_id, hid_t dxpl_id, uint32_t filters, hsize_t *offset,
-    size_t data_size, const void *buf)
-{
-    H5D_t      *dset = NULL;
-    int         ndims;
-    hsize_t    *dims = NULL;
-    hsize_t    *internal_offset = NULL;
-    int         i;
-    herr_t      ret_value = SUCCEED;    /* Return value */
-
-    FUNC_ENTER_API(FAIL)
-
-    /* check arguments */
-    if(NULL == (dset = (H5D_t *)H5I_object_verify(dset_id, H5I_DATASET)))
-        HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a dataset")
-    if(NULL == dset->oloc.file)
-        HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a file")
-
-    if(H5D_CHUNKED != dset->shared->layout.type)
-        HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a chunked dataset")
-
-    /* Get the default dataset transfer property list if the user didn't provide one */
-    if(H5P_DEFAULT == dxpl_id)
-        dxpl_id = H5P_DATASET_XFER_DEFAULT;
-    else
-        if(TRUE != H5P_isa_class(dxpl_id, H5P_DATASET_XFER))
-            HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not xfer parms")
-
-    if(!offset)
-        HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no offset")
-
-    if(!data_size)
-        HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no data size")
-
-    if(!buf)
-        HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no data buffer")
-
-    ndims = (int)H5S_GET_EXTENT_NDIMS(dset->shared->space);
-    if(NULL == (dims = (hsize_t *)H5MM_malloc(ndims * sizeof(hsize_t))))
-        HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed for dimensions")
-
-    if(NULL == (internal_offset = (hsize_t *)H5MM_malloc((ndims + 1) * sizeof(hsize_t))))
-        HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed for offset")
-
-    if(H5S_get_simple_extent_dims(dset->shared->space, dims, NULL) < 0)
-        HGOTO_ERROR(H5E_DATASPACE, H5E_CANTGET, FAIL, "can't retrieve dataspace extent dims")
-
-    for(i = 0; i < ndims; i++) {
-        /* Make sure the offset doesn't exceed the dataset's dimensions */
-        if(offset[i] > dims[i])
-            HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL, "offset exceeds dimensions of dataset")
-
-        /* Make sure the offset falls right on a chunk's boundary */
-        if(offset[i] % dset->shared->layout.u.chunk.dim[i])
-            HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL, "offset doesn't fall on chunk's boundary")
-
-        internal_offset[i] = offset[i];
-    }
-
-    /* The library's chunking code requires the offset to terminate with a zero */
-    internal_offset[ndims] = 0;
-
-    /* write raw data */
-    if(H5D__chunk_direct_write(dset, dxpl_id, filters, internal_offset, data_size, buf) < 0)
-        HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "can't write chunk directly")
-
-done:
-    if(dims)
-        H5MM_free(dims);
-
-    FUNC_LEAVE_API(ret_value)
-} /* end H5PSIdirect_write() */
-
-
-/*-------------------------------------------------------------------------
* Function: H5D__read
*
* Purpose: Reads (part of) a DATASET into application memory BUF. See