author    Quincey Koziol <koziol@hdfgroup.org>    2008-10-13 07:52:10 (GMT)
committer Quincey Koziol <koziol@hdfgroup.org>    2008-10-13 07:52:10 (GMT)
commit    7cfab788fc243ccc493cc98f8c6e504ca42157d0 (patch)
tree      19efa867ce18943c04124403ad1a484d3d4d2d8c /src/H5Dint.c
parent    bc010e55565dd53a40cb0187afe34f8506a990e2 (diff)
[svn-r15841] Description:
Use metadata journaling callback to allow dataset code to track journal status changes and flush cached info appropriately.

Tested on:
    FreeBSD/32 6.2 (duty) in debug mode
    FreeBSD/64 6.2 (liberty) w/C++ & FORTRAN, in debug mode
    Linux/32 2.6 (kagiso) w/PGI compilers, w/C++ & FORTRAN, w/threadsafe, in debug mode
    Linux/64-amd64 2.6 (smirom) w/default API=1.6.x, w/C++ & FORTRAN, in production mode
    Linux/64-ia64 2.6 (cobalt) w/Intel compilers, w/C++ & FORTRAN, in production mode
    Solaris/32 2.10 (linew) w/deprecated symbols disabled, w/C++ & FORTRAN, w/szip filter, in production mode
    Mac OS X/32 10.5.2 (amazon) in debug mode
    Linux/64-ia64 2.4 (tg-login3) w/parallel, w/FORTRAN, in production mode
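For orientation, the sketch below mirrors the lifecycle this patch wires into the dataset code: a dataset registers a journal-status callback with the metadata cache when it is created or opened, the cache invokes the callback whenever journaling is switched on or off, the callback flushes cached dataset state when journaling becomes enabled and records the new state, and the dataset deregisters the callback when it is closed. The cache-side names here (mdj_config_t, register_mdjsc_callback, deregister_mdjsc_callback, and the dropped dxpl_id parameter) are simplified stand-ins for the branch-internal H5C2/H5AC2 API used in the diff below, not the real HDF5 interfaces.

#include <assert.h>
#include <stdbool.h>
#include <stdio.h>

/* Stand-in for H5C2_mdj_config_t: the cache's journaling configuration */
typedef struct mdj_config_t {
    bool enable_journaling;             /* is metadata journaling active? */
} mdj_config_t;

/* Stand-in for the journal-status-change callback signature */
typedef void (*mdjsc_cb_t)(const mdj_config_t *config, void *udata);

/* Minimal single-slot "cache" that remembers one registered callback */
typedef struct cache_t {
    mdjsc_cb_t cb;
    void      *udata;
    int        idx;                     /* -1 means "no callback registered" */
} cache_t;

/* Stand-in dataset: tracks its callback index and last-seen journal state */
typedef struct dataset_t {
    int  mdjsc_idx;                     /* like new_dset->mdjsc_idx in the patch */
    bool journaling_enabled;            /* like dset->shared->journaling_enabled */
} dataset_t;

/* Analogue of H5D_journal_status_cb: flush when journaling turns on, record state */
static void journal_status_cb(const mdj_config_t *config, void *udata)
{
    dataset_t *dset = (dataset_t *)udata;

    assert(config && dset);
    if (config->enable_journaling)
        printf("journaling enabled: flushing cached dataset info\n");
    dset->journaling_enabled = config->enable_journaling;
}

/* Analogues of H5AC2_register_mdjsc_callback / H5AC2_deregister_mdjsc_callback */
static int register_mdjsc_callback(cache_t *c, mdjsc_cb_t cb, void *udata, int *idx)
{
    if (c->idx >= 0)
        return -1;                      /* only one slot in this toy cache */
    c->cb = cb;
    c->udata = udata;
    c->idx = 0;
    *idx = c->idx;
    return 0;
}

static int deregister_mdjsc_callback(cache_t *c, int idx)
{
    if (idx != c->idx)
        return -1;
    c->cb = NULL;
    c->udata = NULL;
    c->idx = -1;
    return 0;
}

int main(void)
{
    cache_t      cache  = { NULL, NULL, -1 };
    dataset_t    dset   = { -1, false };
    mdj_config_t config = { true };

    /* H5D_create / H5D_open: register the callback and remember its index */
    if (register_mdjsc_callback(&cache, journal_status_cb, &dset, &dset.mdjsc_idx) < 0)
        return 1;

    /* Cache side: journaling status changed, notify the registered callback */
    cache.cb(&config, cache.udata);

    /* H5D_close: deregister using the saved index, if one was registered */
    if (dset.mdjsc_idx >= 0 && deregister_mdjsc_callback(&cache, dset.mdjsc_idx) < 0)
        return 1;
    return 0;
}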
Diffstat (limited to 'src/H5Dint.c')
-rw-r--r--  src/H5Dint.c  60
1 file changed, 58 insertions, 2 deletions
diff --git a/src/H5Dint.c b/src/H5Dint.c
index 2d7b16d..d4a0ab0 100644
--- a/src/H5Dint.c
+++ b/src/H5Dint.c
@@ -502,6 +502,47 @@ done:
/*-------------------------------------------------------------------------
+ * Function: H5D_journal_status_cb
+ *
+ * Purpose: Update journal status for dataset
+ *
+ * Return: Success: Non-negative
+ * Failure: Negative
+ *
+ * Programmer: Quincey Koziol
+ * Sunday, October 12, 2008
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+H5D_journal_status_cb(const H5C2_mdj_config_t *mdj_config, hid_t dxpl_id,
+ void *udata)
+{
+ H5D_t *dset = (H5D_t *)udata; /* User callback data */
+ herr_t ret_value = SUCCEED; /* Return value */
+
+ FUNC_ENTER_NOAPI_NOINIT(H5D_journal_status_cb)
+
+ /* Sanity check */
+ HDassert(mdj_config);
+ HDassert(dset);
+
+ /* Check if journaling is now enabled */
+ if(mdj_config->enable_journaling) {
+ /* Flush any cached dataset information */
+ if(H5D_flush_real(dset, dxpl_id, (unsigned)H5F_FLUSH_NONE) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to flush cached dataset info")
+ } /* end if */
+
+ /* Keep current journaling state */
+ dset->shared->journaling_enabled = mdj_config->enable_journaling;
+
+done:
+ FUNC_LEAVE_NOAPI_VOID
+} /* end H5D_journal_status_cb() */
+
+
+/*-------------------------------------------------------------------------
* Function: H5D_new
*
* Purpose: Creates a new, empty dataset structure
@@ -536,7 +577,7 @@ H5D_new(const H5F_t *file, hid_t dcpl_id, hbool_t creating, hbool_t vl_type)
/* Copy the default dataset information */
HDmemcpy(new_dset, &H5D_def_dset, sizeof(H5D_shared_t));
- /* Remember whether journaling is enabled, to help managed future behavior */
+ /* Remember whether journaling is enabled, to help manage future behavior */
new_dset->journaling_enabled = journaling_enabled;
/* If we are using the default dataset creation property list, during creation
@@ -544,7 +585,7 @@ H5D_new(const H5F_t *file, hid_t dcpl_id, hbool_t creating, hbool_t vl_type)
*/
if(!vl_type && creating && dcpl_id == H5P_DATASET_CREATE_DEFAULT) {
if(H5I_inc_ref(dcpl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, NULL, "Can't increment default DCPL ID")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, NULL, "can't increment default DCPL ID")
new_dset->dcpl_id = dcpl_id;
} /* end if */
else {
@@ -1055,6 +1096,7 @@ H5D_create(H5F_t *file, hid_t type_id, const H5S_t *space, hid_t dcpl_id,
/* Initialize the dataset object */
if(NULL == (new_dset = H5FL_CALLOC(H5D_t)))
HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed")
+ new_dset->mdjsc_idx = (-1);
/* Set up & reset dataset location */
dset_loc.oloc = &(new_dset->oloc);
@@ -1162,6 +1204,10 @@ H5D_create(H5F_t *file, hid_t type_id, const H5S_t *space, hid_t dcpl_id,
new_dset->shared->fo_count = 1;
+ /* Register callback for this dataset with cache, when journaling status changes */
+ if(H5AC2_register_mdjsc_callback(new_dset->oloc.file, H5D_journal_status_cb, new_dset, &new_dset->mdjsc_idx, NULL) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "can't register journal status callback")
+
/* Success */
ret_value = new_dset;
@@ -1232,6 +1278,7 @@ H5D_open(const H5G_loc_t *loc, hid_t dxpl_id)
/* Allocate the dataset structure */
if(NULL == (dataset = H5FL_CALLOC(H5D_t)))
HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed")
+ dataset->mdjsc_idx = (-1);
/* Shallow copy (take ownership) of the object location object */
if(H5O_loc_copy(&(dataset->oloc), loc->oloc, H5_COPY_SHALLOW) < 0)
@@ -1250,6 +1297,10 @@ H5D_open(const H5G_loc_t *loc, hid_t dxpl_id)
if(H5D_open_oid(dataset, dxpl_id) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_NOTFOUND, NULL, "not found")
+ /* Register callback for this dataset with cache, when journaling status changes */
+ if(H5AC2_register_mdjsc_callback(dataset->oloc.file, H5D_journal_status_cb, dataset, &dataset->mdjsc_idx, NULL) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "can't register journal status callback")
+
/* Add the dataset to the list of opened objects in the file */
if(H5FO_insert(dataset->oloc.file, dataset->oloc.addr, dataset->shared, FALSE) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINSERT, NULL, "can't insert dataset into list of open objects")
@@ -1569,6 +1620,11 @@ H5D_close(H5D_t *dataset)
H5D_chunk_stats(dataset, FALSE);
#endif /* H5D_CHUNK_DEBUG */
+ /* Deregister callback for this dataset with cache, when journaling status changes */
+ if(dataset->mdjsc_idx >= 0)
+ if(H5AC2_deregister_mdjsc_callback(dataset->oloc.file, dataset->mdjsc_idx) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTRELEASE, FAIL, "can't deregister journal status callback")
+
dataset->shared->fo_count--;
if(dataset->shared->fo_count == 0) {
/* Flush the dataset's information */