From a4e730506f73381d8d3fc768249ce860b23b2d54 Mon Sep 17 00:00:00 2001 From: Vailin Choi Date: Thu, 10 Sep 2015 23:39:09 -0500 Subject: [svn-r27758] Implementation for "h5format_convert" tool to convert a chunked dataset's indexing type to v1 B-tree indexing type. Code review pending. --- MANIFEST | 25 + configure | 4 +- configure.ac | 2 + src/H5D.c | 76 ++ src/H5Dchunk.c | 142 ++ src/H5Dearray.c | 3 + src/H5Dfarray.c | 8 +- src/H5Dint.c | 103 ++ src/H5Dpkg.h | 7 + src/H5Dpublic.h | 4 + src/H5EA.c | 4 +- src/H5EAprivate.h | 2 +- src/H5FA.c | 6 +- testpar/t_cache.c | 11 +- tools/Makefile.am | 3 +- tools/Makefile.in | 4 +- tools/h5format_convert/Makefile.am | 49 + tools/h5format_convert/Makefile.in | 1427 ++++++++++++++++++++ tools/h5format_convert/h5fc_chk_idx.c | 101 ++ tools/h5format_convert/h5fc_gentest.c | 623 +++++++++ tools/h5format_convert/h5format_convert.c | 438 ++++++ tools/h5format_convert/testfiles/h5fc_all.h5 | Bin 0 -> 7672 bytes tools/h5format_convert/testfiles/h5fc_d_file.ddl | 22 + tools/h5format_convert/testfiles/h5fc_dname.ddl | 22 + tools/h5format_convert/testfiles/h5fc_edge.h5 | Bin 0 -> 2526 bytes tools/h5format_convert/testfiles/h5fc_help.ddl | 21 + tools/h5format_convert/testfiles/h5fc_new.h5 | Bin 0 -> 6130 bytes .../testfiles/h5fc_nonexistdset_file.ddl | 1 + .../testfiles/h5fc_nonexistfile.ddl | 1 + tools/h5format_convert/testfiles/h5fc_nooption.ddl | 21 + tools/h5format_convert/testfiles/h5fc_old.h5 | Bin 0 -> 5688 bytes tools/h5format_convert/testfiles/h5fc_v_all.ddl | 26 + tools/h5format_convert/testfiles/h5fc_v_bt1.ddl | 11 + tools/h5format_convert/testfiles/h5fc_v_n_1d.ddl | 13 + tools/h5format_convert/testfiles/h5fc_v_n_all.ddl | 45 + .../testfiles/h5fc_v_ndata_bt1.ddl | 12 + .../testfiles/h5fc_v_non_chunked.ddl | 9 + tools/h5format_convert/testfiles/h5fc_version.ddl | 1 + tools/h5format_convert/testh5fc.sh.in | 381 ++++++ tools/h5stat/h5stat_gentest.c | 1 - 40 files changed, 3607 insertions(+), 22 deletions(-) create mode 100644 tools/h5format_convert/Makefile.am create mode 100644 tools/h5format_convert/Makefile.in create mode 100644 tools/h5format_convert/h5fc_chk_idx.c create mode 100644 tools/h5format_convert/h5fc_gentest.c create mode 100644 tools/h5format_convert/h5format_convert.c create mode 100644 tools/h5format_convert/testfiles/h5fc_all.h5 create mode 100644 tools/h5format_convert/testfiles/h5fc_d_file.ddl create mode 100644 tools/h5format_convert/testfiles/h5fc_dname.ddl create mode 100644 tools/h5format_convert/testfiles/h5fc_edge.h5 create mode 100644 tools/h5format_convert/testfiles/h5fc_help.ddl create mode 100644 tools/h5format_convert/testfiles/h5fc_new.h5 create mode 100644 tools/h5format_convert/testfiles/h5fc_nonexistdset_file.ddl create mode 100644 tools/h5format_convert/testfiles/h5fc_nonexistfile.ddl create mode 100644 tools/h5format_convert/testfiles/h5fc_nooption.ddl create mode 100644 tools/h5format_convert/testfiles/h5fc_old.h5 create mode 100644 tools/h5format_convert/testfiles/h5fc_v_all.ddl create mode 100644 tools/h5format_convert/testfiles/h5fc_v_bt1.ddl create mode 100644 tools/h5format_convert/testfiles/h5fc_v_n_1d.ddl create mode 100644 tools/h5format_convert/testfiles/h5fc_v_n_all.ddl create mode 100644 tools/h5format_convert/testfiles/h5fc_v_ndata_bt1.ddl create mode 100644 tools/h5format_convert/testfiles/h5fc_v_non_chunked.ddl create mode 100644 tools/h5format_convert/testfiles/h5fc_version.ddl create mode 100644 tools/h5format_convert/testh5fc.sh.in diff --git a/MANIFEST b/MANIFEST index 5365b39..1658a2c 100644 --- 
a/MANIFEST +++ b/MANIFEST @@ -1289,6 +1289,31 @@ ./tools/h5diff/testh5diff.sh.in ./tools/h5diff/testph5diff.sh.in +# h5format_convert sources +./tools/h5format_convert/Makefile.am +./tools/h5format_convert/Makefile.in +./tools/h5format_convert/h5fc_chk_idx.c +./tools/h5format_convert/h5fc_gentest.c +./tools/h5format_convert/h5format_convert.c +./tools/h5format_convert/testfiles/h5fc_v_n_all.ddl +./tools/h5format_convert/testfiles/h5fc_v_bt1.ddl +./tools/h5format_convert/testfiles/h5fc_v_non_chunked.ddl +./tools/h5format_convert/testfiles/h5fc_d_file.ddl +./tools/h5format_convert/testfiles/h5fc_v_ndata_bt1.ddl +./tools/h5format_convert/testfiles/h5fc_dname.ddl +./tools/h5format_convert/testfiles/h5fc_version.ddl +./tools/h5format_convert/testfiles/h5fc_nonexistdset_file.ddl +./tools/h5format_convert/testfiles/h5fc_help.ddl +./tools/h5format_convert/testfiles/h5fc_v_all.ddl +./tools/h5format_convert/testfiles/h5fc_nooption.ddl +./tools/h5format_convert/testfiles/h5fc_v_n_1d.ddl +./tools/h5format_convert/testfiles/h5fc_nonexistfile.ddl +./tools/h5format_convert/testfiles/h5fc_old.h5 +./tools/h5format_convert/testfiles/h5fc_new.h5 +./tools/h5format_convert/testfiles/h5fc_all.h5 +./tools/h5format_convert/testfiles/h5fc_edge.h5 +./tools/h5format_convert/testh5fc.sh.in + # h5repack sources ./tools/h5repack/Makefile.am ./tools/h5repack/Makefile.in diff --git a/configure b/configure index d2dfa0e..47dc311 100755 --- a/configure +++ b/configure @@ -30305,7 +30305,7 @@ else fi -ac_config_files="$ac_config_files src/libhdf5.settings Makefile src/Makefile test/Makefile test/testcheck_version.sh test/testerror.sh test/testflushrefresh.sh test/H5srcdir_str.h test/testlibinfo.sh test/testlinks_env.sh test/testswmr.sh test/test_plugin.sh test/test_usecases.sh testpar/Makefile tools/Makefile tools/h5dump/Makefile tools/h5dump/testh5dump.sh tools/h5dump/testh5dumppbits.sh tools/h5dump/testh5dumpxml.sh tools/h5ls/testh5ls.sh tools/h5import/Makefile tools/h5import/h5importtestutil.sh tools/h5diff/Makefile tools/h5diff/testh5diff.sh tools/h5diff/testph5diff.sh tools/h5jam/Makefile tools/h5jam/testh5jam.sh tools/h5repack/Makefile tools/h5repack/h5repack.sh tools/h5repack/h5repack_plugin.sh tools/h5ls/Makefile tools/h5copy/Makefile tools/h5copy/testh5copy.sh tools/lib/Makefile tools/misc/Makefile tools/misc/h5cc tools/misc/testh5clear.sh tools/misc/testh5mkgrp.sh tools/misc/testh5repart.sh tools/h5stat/testh5stat.sh tools/h5stat/Makefile tools/perform/Makefile examples/Makefile examples/run-c-ex.sh examples/testh5cc.sh c++/Makefile c++/src/Makefile c++/src/h5c++ c++/test/Makefile c++/test/H5srcdir_str.h c++/examples/Makefile c++/examples/run-c++-ex.sh c++/examples/testh5c++.sh fortran/Makefile fortran/src/h5fc fortran/src/Makefile fortran/test/Makefile fortran/testpar/Makefile fortran/examples/Makefile fortran/examples/run-fortran-ex.sh fortran/examples/testh5fc.sh hl/Makefile hl/src/Makefile hl/test/Makefile hl/test/H5srcdir_str.h hl/tools/Makefile hl/tools/gif2h5/Makefile hl/tools/gif2h5/h52giftest.sh hl/tools/h5watch/Makefile hl/tools/h5watch/testh5watch.sh hl/examples/Makefile hl/examples/run-hlc-ex.sh hl/c++/Makefile hl/c++/src/Makefile hl/c++/test/Makefile hl/c++/examples/Makefile hl/c++/examples/run-hlc++-ex.sh hl/fortran/Makefile hl/fortran/src/Makefile hl/fortran/test/Makefile hl/fortran/examples/Makefile hl/fortran/examples/run-hlfortran-ex.sh" +ac_config_files="$ac_config_files src/libhdf5.settings Makefile src/Makefile test/Makefile test/testcheck_version.sh test/testerror.sh test/testflushrefresh.sh 
test/H5srcdir_str.h test/testlibinfo.sh test/testlinks_env.sh test/testswmr.sh test/test_plugin.sh test/test_usecases.sh testpar/Makefile tools/Makefile tools/h5dump/Makefile tools/h5dump/testh5dump.sh tools/h5dump/testh5dumppbits.sh tools/h5dump/testh5dumpxml.sh tools/h5ls/testh5ls.sh tools/h5import/Makefile tools/h5import/h5importtestutil.sh tools/h5diff/Makefile tools/h5diff/testh5diff.sh tools/h5diff/testph5diff.sh tools/h5jam/Makefile tools/h5jam/testh5jam.sh tools/h5repack/Makefile tools/h5repack/h5repack.sh tools/h5repack/h5repack_plugin.sh tools/h5ls/Makefile tools/h5copy/Makefile tools/h5copy/testh5copy.sh tools/lib/Makefile tools/misc/Makefile tools/misc/h5cc tools/misc/testh5clear.sh tools/misc/testh5mkgrp.sh tools/misc/testh5repart.sh tools/h5stat/testh5stat.sh tools/h5stat/Makefile tools/h5format_convert/Makefile tools/h5format_convert/testh5fc.sh tools/perform/Makefile examples/Makefile examples/run-c-ex.sh examples/testh5cc.sh c++/Makefile c++/src/Makefile c++/src/h5c++ c++/test/Makefile c++/test/H5srcdir_str.h c++/examples/Makefile c++/examples/run-c++-ex.sh c++/examples/testh5c++.sh fortran/Makefile fortran/src/h5fc fortran/src/Makefile fortran/test/Makefile fortran/testpar/Makefile fortran/examples/Makefile fortran/examples/run-fortran-ex.sh fortran/examples/testh5fc.sh hl/Makefile hl/src/Makefile hl/test/Makefile hl/test/H5srcdir_str.h hl/tools/Makefile hl/tools/gif2h5/Makefile hl/tools/gif2h5/h52giftest.sh hl/tools/h5watch/Makefile hl/tools/h5watch/testh5watch.sh hl/examples/Makefile hl/examples/run-hlc-ex.sh hl/c++/Makefile hl/c++/src/Makefile hl/c++/test/Makefile hl/c++/examples/Makefile hl/c++/examples/run-hlc++-ex.sh hl/fortran/Makefile hl/fortran/src/Makefile hl/fortran/test/Makefile hl/fortran/examples/Makefile hl/fortran/examples/run-hlfortran-ex.sh" cat >confcache <<\_ACEOF @@ -31609,6 +31609,8 @@ do "tools/misc/testh5repart.sh") CONFIG_FILES="$CONFIG_FILES tools/misc/testh5repart.sh" ;; "tools/h5stat/testh5stat.sh") CONFIG_FILES="$CONFIG_FILES tools/h5stat/testh5stat.sh" ;; "tools/h5stat/Makefile") CONFIG_FILES="$CONFIG_FILES tools/h5stat/Makefile" ;; + "tools/h5format_convert/Makefile") CONFIG_FILES="$CONFIG_FILES tools/h5format_convert/Makefile" ;; + "tools/h5format_convert/testh5fc.sh") CONFIG_FILES="$CONFIG_FILES tools/h5format_convert/testh5fc.sh" ;; "tools/perform/Makefile") CONFIG_FILES="$CONFIG_FILES tools/perform/Makefile" ;; "examples/Makefile") CONFIG_FILES="$CONFIG_FILES examples/Makefile" ;; "examples/run-c-ex.sh") CONFIG_FILES="$CONFIG_FILES examples/run-c-ex.sh" ;; diff --git a/configure.ac b/configure.ac index b271a55..c0cff4c 100644 --- a/configure.ac +++ b/configure.ac @@ -3009,6 +3009,8 @@ AC_CONFIG_FILES([src/libhdf5.settings tools/misc/testh5repart.sh tools/h5stat/testh5stat.sh tools/h5stat/Makefile + tools/h5format_convert/Makefile + tools/h5format_convert/testh5fc.sh tools/perform/Makefile examples/Makefile examples/run-c-ex.sh diff --git a/src/H5D.c b/src/H5D.c index 98872fd..ff57431 100644 --- a/src/H5D.c +++ b/src/H5D.c @@ -1057,3 +1057,79 @@ done: FUNC_LEAVE_API(ret_value) } /* H5Drefresh */ + +/*------------------------------------------------------------------------- + * Function: H5Dformat_convert (Internal) + * + * Purpose: Convert a dataset's chunk indexing type to version 1 B-tree + * + * Return: Non-negative on success, negative on failure + * + * Programmer: Vailin Choi; Feb 2015 + * + *------------------------------------------------------------------------- + */ +herr_t +H5Dformat_convert(hid_t dset_id) +{ + H5D_t *dset; /* 
Dataset to refresh */ + herr_t ret_value = SUCCEED; /* return value */ + + FUNC_ENTER_API(FAIL) + H5TRACE1("e", "i", dset_id); + + /* Check args */ + if(NULL == (dset = (H5D_t *)H5I_object_verify(dset_id, H5I_DATASET))) + HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a dataset") + + /* Nothing to do if not a chunked dataset */ + if(dset->shared->layout.type != H5D_CHUNKED) + HGOTO_DONE(SUCCEED) + + /* Nothing to do if the chunk indexing type is already version 1 B-tree */ + if(dset->shared->layout.u.chunk.idx_type == H5D_CHUNK_IDX_BTREE) + HGOTO_DONE(SUCCEED) + + /* Call private function to do the conversion */ + if((H5D__format_convert(dset, H5AC_dxpl_id)) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_CANTLOAD, FAIL, "unable to convert chunk indexing type for dataset") + +done: + FUNC_LEAVE_API(ret_value) +} /* H5Dformat_convert */ + + +/*------------------------------------------------------------------------- + * Function: H5Dget_chunk_index_type (Internal) + * + * Purpose: Retrieve a dataset's chunk indexing type + * + * Return: Non-negative on success, negative on failure + * + * Programmer: Vailin Choi; Feb 2015 + * + *------------------------------------------------------------------------- + */ +herr_t +H5Dget_chunk_index_type(hid_t did, H5D_chunk_index_t *idx_type) +{ + H5D_t *dset; /* Dataset to refresh */ + herr_t ret_value = SUCCEED; /* return value */ + + FUNC_ENTER_API(FAIL) + H5TRACE1("e", "i", did); + + /* Check args */ + if(NULL == (dset = (H5D_t *)H5I_object_verify(did, H5I_DATASET))) + HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a dataset") + + /* Should be a chunked dataset */ + if(dset->shared->layout.type != H5D_CHUNKED) + HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "dataset is not chunked") + + if(idx_type) /* Get the chunk indexing type */ + *idx_type = dset->shared->layout.u.chunk.idx_type; + +done: + FUNC_LEAVE_API(ret_value) +} /* H5Dget_chunk_index_type() */ diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c index a90ea75..3e1f409 100644 --- a/src/H5Dchunk.c +++ b/src/H5Dchunk.c @@ -165,6 +165,13 @@ typedef struct H5D_chunk_it_ud4_t { uint32_t *chunk_dim; /* Chunk dimensions */ } H5D_chunk_it_ud4_t; +/* Callback info for iteration to format convert chunks */ +typedef struct H5D_chunk_it_ud5_t { + H5D_chk_idx_info_t *new_idx_info; /* Dest. chunk index info object */ + unsigned dset_ndims; /* Number of dimensions in dataset */ + hsize_t *dset_dims; /* Dataset dimensions */ +} H5D_chunk_it_ud5_t; + /* Callback info for nonexistent readvv operation */ typedef struct H5D_chunk_readvv_ud_t { unsigned char *rbuf; /* Read buffer to initialize */ @@ -212,6 +219,9 @@ H5D__nonexistent_readvv(const H5D_io_info_t *io_info, size_t chunk_max_nseq, size_t *chunk_curr_seq, size_t chunk_len_arr[], hsize_t chunk_offset_arr[], size_t mem_max_nseq, size_t *mem_curr_seq, size_t mem_len_arr[], hsize_t mem_offset_arr[]); +/* format convert cb */ +static int H5D__chunk_format_convert_cb(const H5D_chunk_rec_t *chunk_rec, void *_udata); + /* Helper routines */ static herr_t H5D__chunk_set_info_real(H5O_layout_chunk_t *layout, unsigned ndims, const hsize_t *curr_dims, const hsize_t *max_dims); @@ -6338,3 +6348,135 @@ H5D__chunk_file_alloc(const H5D_chk_idx_info_t *idx_info, const H5F_block_t *old done: FUNC_LEAVE_NOAPI(ret_value) } /* H5D__chunk_file_alloc() */ + +/*------------------------------------------------------------------------- + * Function: H5D__chunk_format_convert_cb + * + * Purpose: Callback routine to insert chunk address into v1 B-tree + * chunk index. 
+ * + * Return: Success: Non-negative + * Failure: Negative + * + * Programmer: Vailin Choi; Feb 2015 + * + *------------------------------------------------------------------------- + */ +static int +H5D__chunk_format_convert_cb(const H5D_chunk_rec_t *chunk_rec, void *_udata) +{ + H5D_chunk_it_ud5_t *udata = (H5D_chunk_it_ud5_t *)_udata; /* User data */ + H5D_chk_idx_info_t *new_idx_info; /* The new chunk index information */ + H5D_chunk_ud_t insert_udata; /* Chunk information to be inserted */ + haddr_t chunk_addr; /* Chunk address */ + size_t nbytes; /* Chunk size */ + void *buf = NULL; /* Pointer to buffer of chunk data */ + int ret_value = H5_ITER_CONT; /* Return value */ + + FUNC_ENTER_STATIC + + new_idx_info = udata->new_idx_info; + H5_CHECKED_ASSIGN(nbytes, size_t, chunk_rec->nbytes, uint32_t); + chunk_addr = chunk_rec->chunk_addr; + + if(new_idx_info->pline->nused && + (new_idx_info->layout->flags & H5O_LAYOUT_CHUNK_DONT_FILTER_PARTIAL_BOUND_CHUNKS) && + (H5D__chunk_is_partial_edge_chunk(chunk_rec->scaled, udata->dset_ndims, udata->dset_dims, + new_idx_info->layout->dim)) ) { + /* This is a partial non-filtered edge chunk */ + /* Convert the chunk to a filtered edge chunk for v1 B-tree chunk index */ + + unsigned filter_mask = chunk_rec->filter_mask; + H5Z_cb_t cb_struct; /* Filter failure callback struct */ + size_t read_size = nbytes; /* Bytes to read */ + + HDassert(read_size == new_idx_info->layout->size); + + cb_struct.func = NULL; /* no callback function when failed */ + + /* Allocate buffer for chunk data */ + if(NULL == (buf = H5MM_malloc(read_size))) + HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, H5_ITER_ERROR, "memory allocation failed for raw data chunk") + + /* Read the non-filtered edge chunk */ + if(H5F_block_read(new_idx_info->f, H5FD_MEM_DRAW, chunk_addr, read_size, new_idx_info->dxpl_id, buf) < 0) + HGOTO_ERROR(H5E_IO, H5E_READERROR, H5_ITER_ERROR, "unable to read raw data chunk") + + /* Pass the chunk through the pipeline */ + if(H5Z_pipeline(new_idx_info->pline, 0, &filter_mask, H5Z_NO_EDC, cb_struct, &nbytes, + &read_size, &buf) < 0) + HGOTO_ERROR(H5E_PLINE, H5E_CANTFILTER, H5_ITER_ERROR, "output pipeline failed") + +#if H5_SIZEOF_SIZE_T > 4 + /* Check for the chunk expanding too much to encode in a 32-bit value */ + if(nbytes > ((size_t)0xffffffff)) + HGOTO_ERROR(H5E_DATASET, H5E_BADRANGE, H5_ITER_ERROR, "chunk too large for 32-bit length") +#endif /* H5_SIZEOF_SIZE_T > 4 */ + + /* Allocate space for the filtered chunk */ + if((chunk_addr = H5MF_alloc(new_idx_info->f, H5FD_MEM_DRAW, new_idx_info->dxpl_id, (hsize_t)nbytes)) == HADDR_UNDEF) + HGOTO_ERROR(H5E_DATASET, H5E_NOSPACE, H5_ITER_ERROR, "file allocation failed for filtered chunk") + HDassert(H5F_addr_defined(chunk_addr)); + + /* Write the filtered chunk to disk */ + if(H5F_block_write(new_idx_info->f, H5FD_MEM_DRAW, chunk_addr, nbytes, + new_idx_info->dxpl_id, buf) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, H5_ITER_ERROR, "unable to write raw data to file") + } + + /* Set up chunk information for insertion to chunk index */ + insert_udata.chunk_block.offset = chunk_addr; + insert_udata.chunk_block.length = nbytes; + insert_udata.filter_mask = chunk_rec->filter_mask; + insert_udata.common.scaled = chunk_rec->scaled; + insert_udata.common.layout = new_idx_info->layout; + insert_udata.common.storage = new_idx_info->storage; + + /* Insert chunk into the v1 B-tree chunk index */ + if((new_idx_info->storage->ops->insert_addr)(new_idx_info, &insert_udata) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_CANTINSERT, 
H5_ITER_ERROR, "unable to insert chunk addr into index") + +done: + if(buf) + H5MM_xfree(buf); + + FUNC_LEAVE_NOAPI(ret_value) +} /* H5D__chunk_format_convert_cb() */ + + +/*------------------------------------------------------------------------- + * Function: H5D__chunk_format_convert + * + * Purpose: Iterate over the chunks for the current chunk index and insert the + * chunk addresses into the v1 B-tree chunk index via callback. + * + * Return: Non-negative on success/Negative on failure + * + * Programmer: Vailin Choi; Feb 2015 + * + *------------------------------------------------------------------------- + */ +herr_t +H5D__chunk_format_convert(H5D_t *dset, H5D_chk_idx_info_t *idx_info, H5D_chk_idx_info_t *new_idx_info) +{ + H5D_chunk_it_ud5_t udata; /* User data */ + herr_t ret_value = SUCCEED; /* Return value */ + + FUNC_ENTER_PACKAGE + + /* Check args */ + HDassert(dset); + + /* Set up user data */ + udata.new_idx_info = new_idx_info; + udata.dset_ndims = dset->shared->ndims; + udata.dset_dims = dset->shared->curr_dims; + + /* Iterate over the chunks in the current index and insert the chunk addresses into version 1 B-tree index */ + if((dset->shared->layout.storage.u.chunk.ops->iterate)(idx_info, H5D__chunk_format_convert_cb, &udata) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_BADITER, FAIL, "unable to iterate over chunk index to chunk info") + +done: + + FUNC_LEAVE_NOAPI(ret_value) +} /* end H5D__chunk_format_convert() */ diff --git a/src/H5Dearray.c b/src/H5Dearray.c index 49e6783..dcd2632 100644 --- a/src/H5Dearray.c +++ b/src/H5Dearray.c @@ -97,6 +97,9 @@ typedef struct H5D_earray_filt_elmt_t { static int H5D_earray_idx_iterate_cb(hsize_t idx, const void *_elmt, void *_udata); static int H5D_earray_idx_delete_cb(const H5D_chunk_rec_t *chunk_rec, void *_udata); +/* Extensible Array iterator callbacks */ +static int H5D_earray_idx_iterate_cb(hsize_t idx, const void *_elmt, void *_udata); + /* Extensible array class callbacks for chunks w/o filters */ static void *H5D_earray_crt_context(void *udata); static herr_t H5D_earray_dst_context(void *ctx); diff --git a/src/H5Dfarray.c b/src/H5Dfarray.c index 3e51516..a074c87 100644 --- a/src/H5Dfarray.c +++ b/src/H5Dfarray.c @@ -923,7 +923,7 @@ done: * * Modifications: * Vailin Choi; June 2010 - * Modified to handle extendible datdaset. + * Modified to handle extensible dataset. * (fixed max. dim. setting but not H5S_UNLIMITED) * *------------------------------------------------------------------------- @@ -1100,7 +1100,7 @@ done: * * Modifications: * Vailin Choi; June 2010 - * Modified to handle extendible datdaset. + * Modified to handle extensible array dataset. * (fixed max. dim. setting but not H5S_UNLIMITED) * *------------------------------------------------------------------------- @@ -1180,7 +1180,7 @@ done: * * Modifications: * Vailin Choi; June 2010 - * Modified to handle extendible datdaset. + * Modified to handle extensible array dataset. * (fixed max. dim. setting but not H5S_UNLIMITED) * *------------------------------------------------------------------------- @@ -1320,7 +1320,7 @@ done: * * Modifications: * Vailin Choi; June 2010 - * Modified to handle extendible datdaset. + * Modified to handle extensible array dataset. * (fixed max. dim. 
setting but not H5S_UNLIMITED) * *------------------------------------------------------------------------- diff --git a/src/H5Dint.c b/src/H5Dint.c index ee0f49f..45d6ac9 100644 --- a/src/H5Dint.c +++ b/src/H5Dint.c @@ -2678,6 +2678,109 @@ done: /*------------------------------------------------------------------------- + * Function: H5D__format_convert + * + * Purpose: To convert a dataset's chunk indexing type to version 1 btree + * + * Return: Success: Non-negative + * Failure: Negative + * + * Programmer: Vailin Choi; Feb 2015 + * + *------------------------------------------------------------------------- + */ +herr_t +H5D__format_convert(H5D_t *dataset, hid_t dxpl_id) +{ + H5O_t *oh = NULL; /* Pointer to dataset's object header */ + H5D_chk_idx_info_t new_idx_info; /* Index info for the new layout */ + H5D_chk_idx_info_t idx_info; /* Index info for the current layout */ + H5O_layout_t newlayout; /* The new layout */ + unsigned update_flags = H5O_UPDATE_TIME; /* Modification time flag */ + herr_t ret_value = SUCCEED; /* Return value */ + + FUNC_ENTER_PACKAGE_TAG(dxpl_id, dataset->oloc.addr, FAIL) + + /* Check args */ + HDassert(dataset); + + /* Set up the current index info */ + idx_info.f = dataset->oloc.file; + idx_info.dxpl_id = dxpl_id; + idx_info.pline = &dataset->shared->dcpl_cache.pline; + idx_info.layout = &dataset->shared->layout.u.chunk; + idx_info.storage = &dataset->shared->layout.storage.u.chunk; + + /* Copy the current layout info to the new layout */ + HDmemcpy(&newlayout, &dataset->shared->layout, sizeof(H5O_layout_t)); + + /* Set up info for version 1 B-tree in the new layout */ + newlayout.version = H5O_LAYOUT_VERSION_3; + newlayout.storage.u.chunk.idx_type = H5D_CHUNK_IDX_BTREE; + newlayout.storage.u.chunk.idx_addr = HADDR_UNDEF; + newlayout.storage.u.chunk.ops = H5D_COPS_BTREE; + newlayout.storage.u.chunk.u.btree.shared = NULL; + + /* Set up the index info to version 1 B-tree */ + new_idx_info.f = dataset->oloc.file; + new_idx_info.dxpl_id = dxpl_id; + new_idx_info.pline = &dataset->shared->dcpl_cache.pline; + new_idx_info.layout = &newlayout.u.chunk; + new_idx_info.storage = &newlayout.storage.u.chunk; + + /* Initialize version 1 B-tree */ + if(newlayout.storage.u.chunk.ops->init && + (newlayout.storage.u.chunk.ops->init)(&new_idx_info, dataset->shared->space, dataset->oloc.addr) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't initialize indexing information") + + /* If the current chunk index exists */ + if(H5F_addr_defined(dataset->shared->layout.storage.u.chunk.idx_addr)) { + + /* Create version 1 B-tree chunk index */ + if((newlayout.storage.u.chunk.ops->create)(&new_idx_info) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't create chunk index") + + /* Iterate over the chunks in the current index and insert the chunk addresses + * into the version 1 B-tree chunk index */ + if(H5D__chunk_format_convert(dataset, &idx_info, &new_idx_info) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_BADITER, FAIL, "unable to iterate over chunk index to chunk info") + } + + /* Release the old (i.e. 
current) chunk index */ + if(dataset->shared->layout.storage.u.chunk.ops->dest && + (dataset->shared->layout.storage.u.chunk.ops->dest)(&idx_info) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_CANTFREE, FAIL, "unable to release chunk index info") + + /* Delete the "storage" and "layout" messages */ + if(H5O_msg_remove(&dataset->oloc, H5O_STORAGE_ID, H5O_ALL, TRUE, dxpl_id) < 0) + HGOTO_ERROR(H5E_SYM, H5E_CANTDELETE, FAIL, "unable to delete storage message") + if(H5O_msg_remove(&dataset->oloc, H5O_LAYOUT_ID, H5O_ALL, TRUE, dxpl_id) < 0) + HGOTO_ERROR(H5E_SYM, H5E_CANTDELETE, FAIL, "unable to delete layout message") + + HDmemcpy(&dataset->shared->layout, &newlayout, sizeof(H5O_layout_t)); + + if(NULL == (oh = H5O_pin(&dataset->oloc, dxpl_id))) + HGOTO_ERROR(H5E_DATASET, H5E_CANTPIN, FAIL, "unable to pin dataset object header") + + /* Append the new layout message to the object header */ + if(H5O_msg_append_oh(dataset->oloc.file, dxpl_id, oh, H5O_LAYOUT_ID, 0, 0, &newlayout) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update old fill value header message") + + /* Update the layout on disk, if it's been changed */ + if(H5D__layout_oh_write(dataset, dxpl_id, oh, update_flags) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update layout/pline/efl info") + +done: + /* Release pointer to object header */ + if(oh != NULL) + if(H5O_unpin(oh) < 0) + HDONE_ERROR(H5E_DATASET, H5E_CANTUNPIN, FAIL, "unable to unpin dataset object header") + + FUNC_LEAVE_NOAPI_TAG(ret_value, FAIL) +} /* end H5D__format_convert() */ + + +/*------------------------------------------------------------------------- * Function: H5D__mark * * Purpose: Mark some aspect of a dataset as dirty diff --git a/src/H5Dpkg.h b/src/H5Dpkg.h index 0a5818b..9e8211d 100644 --- a/src/H5Dpkg.h +++ b/src/H5Dpkg.h @@ -605,6 +605,10 @@ H5_DLL herr_t H5D__flush_sieve_buf(H5D_t *dataset, hid_t dxpl_id); H5_DLL herr_t H5D__flush_real(H5D_t *dataset, hid_t dxpl_id); H5_DLL herr_t H5D__mark(const H5D_t *dataset, hid_t dxpl_id, unsigned flags); + +/* To convert a dataset's chunk indexing type to v1 B-tree */ +H5_DLL herr_t H5D__format_convert(H5D_t *dataset, hid_t dxpl_id); + /* Internal I/O routines */ H5_DLL herr_t H5D__read(H5D_t *dataset, hid_t mem_type_id, const H5S_t *mem_space, const H5S_t *file_space, hid_t dset_xfer_plist, @@ -711,6 +715,9 @@ H5_DLL herr_t H5D__chunk_direct_write(const H5D_t *dset, hid_t dxpl_id, uint32_t H5_DLL herr_t H5D__chunk_stats(const H5D_t *dset, hbool_t headers); #endif /* H5D_CHUNK_DEBUG */ +/* format convert */ +H5_DLL herr_t H5D__chunk_format_convert(H5D_t *dset, H5D_chk_idx_info_t *idx_info, H5D_chk_idx_info_t *new_idx_info); + /* Functions that operate on compact dataset storage */ H5_DLL herr_t H5D__compact_fill(const H5D_t *dset, hid_t dxpl_id); H5_DLL herr_t H5D__compact_copy(H5F_t *f_src, H5O_storage_compact_t *storage_src, diff --git a/src/H5Dpublic.h b/src/H5Dpublic.h index 8f4a613..ef40db9 100644 --- a/src/H5Dpublic.h +++ b/src/H5Dpublic.h @@ -159,6 +159,10 @@ H5_DLL herr_t H5Dgather(hid_t src_space_id, const void *src_buf, hid_t type_id, size_t dst_buf_size, void *dst_buf, H5D_gather_func_t op, void *op_data); H5_DLL herr_t H5Ddebug(hid_t dset_id); +/* Internal API routines */ +H5_DLL herr_t H5Dformat_convert(hid_t dset_id); +H5_DLL herr_t H5Dget_chunk_index_type(hid_t did, H5D_chunk_index_t *idx_type); + /* Symbols defined for compatibility with previous versions of the HDF5 API. * * Use of these symbols is deprecated. 
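
For reviewers, a minimal sketch (not part of the patch) of how an application could exercise the two routines declared above in H5Dpublic.h, assuming an existing file "sample.h5" containing a chunked dataset "/DSET" (both names hypothetical). The file is opened read-write so the converted v1 B-tree index can be written back; per the H5D.c changes above, H5Dformat_convert is a no-op when the dataset is not chunked or its index is already a v1 B-tree.

#include "hdf5.h"

int
main(void)
{
    hid_t fid = -1;                 /* File ID */
    hid_t did = -1;                 /* Dataset ID */
    H5D_chunk_index_t idx_type;     /* Chunk indexing type */

    /* Open the file read-write so the conversion can be persisted */
    if((fid = H5Fopen("sample.h5", H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
        return 1;

    /* Open the chunked dataset to be converted (hypothetical name) */
    if((did = H5Dopen2(fid, "/DSET", H5P_DEFAULT)) < 0)
        goto error;

    /* Query the dataset's current chunk indexing type */
    if(H5Dget_chunk_index_type(did, &idx_type) < 0)
        goto error;

    /* Convert the chunk index to a version 1 B-tree if it is not one already */
    if(idx_type != H5D_CHUNK_IDX_BTREE)
        if(H5Dformat_convert(did) < 0)
            goto error;

    if(H5Dclose(did) < 0 || H5Fclose(fid) < 0)
        return 1;
    return 0;

error:
    if(did >= 0)
        H5Dclose(did);
    if(fid >= 0)
        H5Fclose(fid);
    return 1;
}

This call sequence is presumably what the new h5fc_chk_idx.c helper and the h5format_convert tool perform through the tools library; the snippet is only meant to make the intended usage of the new routines concrete.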
diff --git a/src/H5EA.c b/src/H5EA.c index 134f559..b8ff7bd 100644 --- a/src/H5EA.c +++ b/src/H5EA.c @@ -1037,8 +1037,8 @@ END_FUNC(PRIV) /* end H5EA_delete() */ /*------------------------------------------------------------------------- * Function: H5EA_iterate * - * Purpose: Iterate over the elements of an extensible array - * (copied and modified from FA_iterate() in H5FA.c) + * Purpose: Iterate over the elements of an extensible array + * (copied and modified from FA_iterate() in H5FA.c) * * Return: SUCCEED/FAIL * diff --git a/src/H5EAprivate.h b/src/H5EAprivate.h index 92c2849..5203af7 100644 --- a/src/H5EAprivate.h +++ b/src/H5EAprivate.h @@ -144,7 +144,7 @@ H5_DLL herr_t H5EA_set(const H5EA_t *ea, hid_t dxpl_id, hsize_t idx, const void H5_DLL herr_t H5EA_get(const H5EA_t *ea, hid_t dxpl_id, hsize_t idx, void *elmt); H5_DLL herr_t H5EA_depend(H5AC_info_t *parent_entry, H5EA_t *ea); H5_DLL herr_t H5EA_undepend(H5AC_info_t *parent_entry, H5EA_t *ea); -H5_DLL herr_t H5EA_iterate(H5EA_t *ea, hid_t dxpl_id, H5EA_operator_t op, void *udata); +H5_DLL herr_t H5EA_iterate(H5EA_t *fa, hid_t dxpl_id, H5EA_operator_t op, void *udata); H5_DLL herr_t H5EA_close(H5EA_t *ea, hid_t dxpl_id); H5_DLL herr_t H5EA_delete(H5F_t *f, hid_t dxpl_id, haddr_t ea_addr, void *ctx_udata); diff --git a/src/H5FA.c b/src/H5FA.c index bd4a162..80ee762 100644 --- a/src/H5FA.c +++ b/src/H5FA.c @@ -95,7 +95,7 @@ const H5FA_class_t *const H5FA_client_class_g[] = { H5FL_DEFINE_STATIC(H5FA_t); /* Declare a PQ free list to manage the element */ -H5FL_BLK_DEFINE(native_elmt); +H5FL_BLK_DEFINE(fa_native_elmt); @@ -713,7 +713,7 @@ H5FA_iterate(H5FA_t *fa, hid_t dxpl_id, H5FA_operator_t op, void *udata)) HDassert(udata); /* Allocate space for a native array element */ - if(NULL == (elmt = H5FL_BLK_MALLOC(native_elmt, fa->hdr->cparam.cls->nat_elmt_size))) + if(NULL == (elmt = H5FL_BLK_MALLOC(fa_native_elmt, fa->hdr->cparam.cls->nat_elmt_size))) H5E_THROW(H5E_CANTALLOC, "memory allocation failed for fixed array element") /* Iterate over all elements in array */ @@ -734,7 +734,7 @@ H5FA_iterate(H5FA_t *fa, hid_t dxpl_id, H5FA_operator_t op, void *udata)) CATCH if(elmt) - elmt = H5FL_BLK_FREE(native_elmt, elmt); + elmt = H5FL_BLK_FREE(fa_native_elmt, elmt); END_FUNC(PRIV) /* end H5FA_iterate() */ diff --git a/testpar/t_cache.c b/testpar/t_cache.c index daef6f2..cc0cb69 100644 --- a/testpar/t_cache.c +++ b/testpar/t_cache.c @@ -389,8 +389,10 @@ static hbool_t serve_rw_count_reset_request(struct mssg_t * mssg_ptr); /* call back functions & related data structures */ -static herr_t datum_get_load_size(const void * udata_ptr, - size_t *image_len_ptr); +static herr_t datum_get_load_size(const void *image_ptr, + const void *udata_ptr, + size_t *image_len_ptr, + size_t *actual_len_ptr); static void * datum_deserialize(const void * image_ptr, size_t len, @@ -440,7 +442,6 @@ const H5C_class_t types[NUMBER_OF_ENTRY_TYPES] = /* mem_type */ H5FD_MEM_DEFAULT, /* flags */ H5AC__CLASS_SKIP_READS | H5AC__CLASS_SKIP_WRITES, /* get_load_size */ (H5AC_get_load_size_func_t)datum_get_load_size, - /* actual_len */ NULL, /* verify_chksum */ NULL, /* deserialize */ (H5AC_deserialize_func_t)datum_deserialize, /* image_len */ (H5AC_image_len_func_t)datum_image_len, @@ -2335,8 +2336,8 @@ serve_rw_count_reset_request(struct mssg_t * mssg_ptr) *------------------------------------------------------------------------- */ static herr_t -datum_get_load_size(const void * udata_ptr, - size_t *image_len_ptr) +datum_get_load_size(const void *image_ptr, const void 
*udata_ptr, + size_t *image_len_ptr, size_t *actual_len_ptr) { haddr_t addr = *(haddr_t *)udata_ptr; int idx; diff --git a/tools/Makefile.am b/tools/Makefile.am index 095cc30..bffc14d 100644 --- a/tools/Makefile.am +++ b/tools/Makefile.am @@ -24,7 +24,6 @@ include $(top_srcdir)/config/commence.am CONFIG=ordered # All subdirectories -SUBDIRS=lib h5diff h5ls h5dump misc h5import h5repack h5jam h5copy h5stat \ - perform +SUBDIRS=lib h5diff h5ls h5dump misc h5import h5repack h5jam h5copy h5stat h5format_convert perform include $(top_srcdir)/config/conclude.am diff --git a/tools/Makefile.in b/tools/Makefile.in index 974e6b4..727e3c5 100644 --- a/tools/Makefile.in +++ b/tools/Makefile.in @@ -657,9 +657,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog *.clog2 CONFIG = ordered # All subdirectories -SUBDIRS = lib h5diff h5ls h5dump misc h5import h5repack h5jam h5copy h5stat \ - perform - +SUBDIRS = lib h5diff h5ls h5dump misc h5import h5repack h5jam h5copy h5stat h5format_convert perform # Automake needs to be taught how to build lib, progs, and tests targets. # These will be filled in automatically for the most part (e.g., diff --git a/tools/h5format_convert/Makefile.am b/tools/h5format_convert/Makefile.am new file mode 100644 index 0000000..d3aef7d --- /dev/null +++ b/tools/h5format_convert/Makefile.am @@ -0,0 +1,49 @@ +# +# Copyright by The HDF Group. +# Copyright by the Board of Trustees of the University of Illinois. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the files COPYING and Copyright.html. COPYING can be found at the root +# of the source code distribution tree; Copyright.html can be found at the +# root level of an installed copy of the electronic HDF5 document set and +# is linked from the top-level documents page. It can also be found at +# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have +# access to either file, you may request a copy from help@hdfgroup.org. +## +## Makefile.am +## Run automake to generate a Makefile.in from this file. +# +# HDF5 Library Makefile(.in) +# + +include $(top_srcdir)/config/commence.am + +# Include src directory +AM_CPPFLAGS+=-I$(top_srcdir)/src -I$(top_srcdir)/tools/lib + +#test script and program +TEST_PROG=h5fc_gentest +TEST_SCRIPT=testh5fc.sh + +check_PROGRAMS=$(TEST_PROG) h5fc_chk_idx +check_SCRIPTS=$(TEST_SCRIPT) +SCRIPT_DEPEND=h5format_convert$(EXEEXT) + +# These are our main targets, the tools +bin_PROGRAMS=h5format_convert + +# Add h5format_convert specific linker flags here +h5format_convert_LDFLAGS = $(LT_STATIC_EXEC) $(AM_LDFLAGS) + +# Tell automake to clean h5redeploy script +CHECK_CLEANFILES+=*.h5 + +# These were generated by configure. Remove them only when distclean. +DISTCLEANFILES=testh5fc.sh + +# All programs rely on hdf5 library and h5tools library +LDADD=$(LIBH5TOOLS) $(LIBHDF5) + +include $(top_srcdir)/config/conclude.am diff --git a/tools/h5format_convert/Makefile.in b/tools/h5format_convert/Makefile.in new file mode 100644 index 0000000..f565cb3 --- /dev/null +++ b/tools/h5format_convert/Makefile.in @@ -0,0 +1,1427 @@ +# Makefile.in generated by automake 1.14.1 from Makefile.am. +# @configure_input@ + +# Copyright (C) 1994-2013 Free Software Foundation, Inc. + +# This Makefile.in is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. 
+ +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. + +@SET_MAKE@ + +# +# Copyright by The HDF Group. +# Copyright by the Board of Trustees of the University of Illinois. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the files COPYING and Copyright.html. COPYING can be found at the root +# of the source code distribution tree; Copyright.html can be found at the +# root level of an installed copy of the electronic HDF5 document set and +# is linked from the top-level documents page. It can also be found at +# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have +# access to either file, you may request a copy from help@hdfgroup.org. +# +# HDF5 Library Makefile(.in) +# + +VPATH = @srcdir@ +am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' +am__make_running_with_option = \ + case $${target_option-} in \ + ?) ;; \ + *) echo "am__make_running_with_option: internal error: invalid" \ + "target option '$${target_option-}' specified" >&2; \ + exit 1;; \ + esac; \ + has_opt=no; \ + sane_makeflags=$$MAKEFLAGS; \ + if $(am__is_gnu_make); then \ + sane_makeflags=$$MFLAGS; \ + else \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + bs=\\; \ + sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ + | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ + esac; \ + fi; \ + skip_next=no; \ + strip_trailopt () \ + { \ + flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ + }; \ + for flg in $$sane_makeflags; do \ + test $$skip_next = yes && { skip_next=no; continue; }; \ + case $$flg in \ + *=*|--*) continue;; \ + -*I) strip_trailopt 'I'; skip_next=yes;; \ + -*I?*) strip_trailopt 'I';; \ + -*O) strip_trailopt 'O'; skip_next=yes;; \ + -*O?*) strip_trailopt 'O';; \ + -*l) strip_trailopt 'l'; skip_next=yes;; \ + -*l?*) strip_trailopt 'l';; \ + -[dEDm]) skip_next=yes;; \ + -[JT]) skip_next=yes;; \ + esac; \ + case $$flg in \ + *$$target_option*) has_opt=yes; break;; \ + esac; \ + done; \ + test $$has_opt = yes +am__make_dryrun = (target_option=n; $(am__make_running_with_option)) +am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) +pkgdatadir = $(datadir)/@PACKAGE@ +pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ +am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd +install_sh_DATA = $(install_sh) -c -m 644 +install_sh_PROGRAM = $(install_sh) -c +install_sh_SCRIPT = $(install_sh) -c +INSTALL_HEADER = $(INSTALL_DATA) +transform = $(program_transform_name) +NORMAL_INSTALL = : +PRE_INSTALL = : +POST_INSTALL = : +NORMAL_UNINSTALL = : +PRE_UNINSTALL = : +POST_UNINSTALL = : +build_triplet = @build@ +host_triplet = @host@ +DIST_COMMON = $(top_srcdir)/config/commence.am \ + $(top_srcdir)/config/conclude.am $(srcdir)/Makefile.in \ + $(srcdir)/Makefile.am $(top_srcdir)/bin/mkinstalldirs \ + $(srcdir)/testh5fc.sh.in $(top_srcdir)/bin/depcomp \ + $(top_srcdir)/bin/test-driver +check_PROGRAMS = $(am__EXEEXT_1) h5fc_chk_idx$(EXEEXT) +bin_PROGRAMS = h5format_convert$(EXEEXT) +TESTS = $(am__EXEEXT_1) $(TEST_SCRIPT) +subdir = tools/h5format_convert +ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 +am__aclocal_m4_deps = $(top_srcdir)/m4/aclocal_cxx.m4 \ + $(top_srcdir)/m4/aclocal_fc.m4 $(top_srcdir)/configure.ac +am__configure_deps = 
$(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ + $(ACLOCAL_M4) +mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs +CONFIG_HEADER = $(top_builddir)/src/H5config.h +CONFIG_CLEAN_FILES = testh5fc.sh +CONFIG_CLEAN_VPATH_FILES = +am__installdirs = "$(DESTDIR)$(bindir)" +am__EXEEXT_1 = h5fc_gentest$(EXEEXT) +PROGRAMS = $(bin_PROGRAMS) +h5fc_chk_idx_SOURCES = h5fc_chk_idx.c +h5fc_chk_idx_OBJECTS = h5fc_chk_idx.$(OBJEXT) +h5fc_chk_idx_LDADD = $(LDADD) +h5fc_chk_idx_DEPENDENCIES = $(LIBH5TOOLS) $(LIBHDF5) +AM_V_lt = $(am__v_lt_@AM_V@) +am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) +am__v_lt_0 = --silent +am__v_lt_1 = +h5fc_gentest_SOURCES = h5fc_gentest.c +h5fc_gentest_OBJECTS = h5fc_gentest.$(OBJEXT) +h5fc_gentest_LDADD = $(LDADD) +h5fc_gentest_DEPENDENCIES = $(LIBH5TOOLS) $(LIBHDF5) +h5format_convert_SOURCES = h5format_convert.c +h5format_convert_OBJECTS = h5format_convert.$(OBJEXT) +h5format_convert_LDADD = $(LDADD) +h5format_convert_DEPENDENCIES = $(LIBH5TOOLS) $(LIBHDF5) +h5format_convert_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC \ + $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CCLD) \ + $(AM_CFLAGS) $(CFLAGS) $(h5format_convert_LDFLAGS) $(LDFLAGS) \ + -o $@ +AM_V_P = $(am__v_P_@AM_V@) +am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) +am__v_P_0 = false +am__v_P_1 = : +AM_V_GEN = $(am__v_GEN_@AM_V@) +am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) +am__v_GEN_0 = @echo " GEN " $@; +am__v_GEN_1 = +AM_V_at = $(am__v_at_@AM_V@) +am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) +am__v_at_0 = @ +am__v_at_1 = +DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)/src +depcomp = $(SHELL) $(top_srcdir)/bin/depcomp +am__depfiles_maybe = depfiles +am__mv = mv -f +COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ + $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) +LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ + $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ + $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ + $(AM_CFLAGS) $(CFLAGS) +AM_V_CC = $(am__v_CC_@AM_V@) +am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) +am__v_CC_0 = @echo " CC " $@; +am__v_CC_1 = +CCLD = $(CC) +LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ + $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ + $(AM_LDFLAGS) $(LDFLAGS) -o $@ +AM_V_CCLD = $(am__v_CCLD_@AM_V@) +am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) +am__v_CCLD_0 = @echo " CCLD " $@; +am__v_CCLD_1 = +SOURCES = h5fc_chk_idx.c h5fc_gentest.c h5format_convert.c +DIST_SOURCES = h5fc_chk_idx.c h5fc_gentest.c h5format_convert.c +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac +am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) +# Read a list of newline-separated strings from the standard input, +# and print each of them once, without duplicates. Input order is +# *not* preserved. +am__uniquify_input = $(AWK) '\ + BEGIN { nonempty = 0; } \ + { items[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in items) print i; }; } \ +' +# Make sure the list of sources is unique. This is necessary because, +# e.g., the same source file might be shared among _SOURCES variables +# for different programs/libraries. 
+am__define_uniq_tagged_files = \ + list='$(am__tagged_files)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | $(am__uniquify_input)` +ETAGS = etags +CTAGS = ctags +am__tty_colors_dummy = \ + mgn= red= grn= lgn= blu= brg= std=; \ + am__color_tests=no +am__tty_colors = { \ + $(am__tty_colors_dummy); \ + if test "X$(AM_COLOR_TESTS)" = Xno; then \ + am__color_tests=no; \ + elif test "X$(AM_COLOR_TESTS)" = Xalways; then \ + am__color_tests=yes; \ + elif test "X$$TERM" != Xdumb && { test -t 1; } 2>/dev/null; then \ + am__color_tests=yes; \ + fi; \ + if test $$am__color_tests = yes; then \ + red=''; \ + grn=''; \ + lgn=''; \ + blu=''; \ + mgn=''; \ + brg=''; \ + std=''; \ + fi; \ +} +am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; +am__vpath_adj = case $$p in \ + $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ + *) f=$$p;; \ + esac; +am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; +am__install_max = 40 +am__nobase_strip_setup = \ + srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` +am__nobase_strip = \ + for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" +am__nobase_list = $(am__nobase_strip_setup); \ + for p in $$list; do echo "$$p $$p"; done | \ + sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ + $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ + if (++n[$$2] == $(am__install_max)) \ + { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ + END { for (dir in files) print dir, files[dir] }' +am__base_list = \ + sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ + sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' +am__uninstall_files_from_dir = { \ + test -z "$$files" \ + || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ + || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ + $(am__cd) "$$dir" && rm -f $$files; }; \ + } +am__recheck_rx = ^[ ]*:recheck:[ ]* +am__global_test_result_rx = ^[ ]*:global-test-result:[ ]* +am__copy_in_global_log_rx = ^[ ]*:copy-in-global-log:[ ]* +# A command that, given a newline-separated list of test names on the +# standard input, print the name of the tests that are to be re-run +# upon "make recheck". +am__list_recheck_tests = $(AWK) '{ \ + recheck = 1; \ + while ((rc = (getline line < ($$0 ".trs"))) != 0) \ + { \ + if (rc < 0) \ + { \ + if ((getline line2 < ($$0 ".log")) < 0) \ + recheck = 0; \ + break; \ + } \ + else if (line ~ /$(am__recheck_rx)[nN][Oo]/) \ + { \ + recheck = 0; \ + break; \ + } \ + else if (line ~ /$(am__recheck_rx)[yY][eE][sS]/) \ + { \ + break; \ + } \ + }; \ + if (recheck) \ + print $$0; \ + close ($$0 ".trs"); \ + close ($$0 ".log"); \ +}' +# A command that, given a newline-separated list of test names on the +# standard input, create the global log from their .trs and .log files. 
+am__create_global_log = $(AWK) ' \ +function fatal(msg) \ +{ \ + print "fatal: making $@: " msg | "cat >&2"; \ + exit 1; \ +} \ +function rst_section(header) \ +{ \ + print header; \ + len = length(header); \ + for (i = 1; i <= len; i = i + 1) \ + printf "="; \ + printf "\n\n"; \ +} \ +{ \ + copy_in_global_log = 1; \ + global_test_result = "RUN"; \ + while ((rc = (getline line < ($$0 ".trs"))) != 0) \ + { \ + if (rc < 0) \ + fatal("failed to read from " $$0 ".trs"); \ + if (line ~ /$(am__global_test_result_rx)/) \ + { \ + sub("$(am__global_test_result_rx)", "", line); \ + sub("[ ]*$$", "", line); \ + global_test_result = line; \ + } \ + else if (line ~ /$(am__copy_in_global_log_rx)[nN][oO]/) \ + copy_in_global_log = 0; \ + }; \ + if (copy_in_global_log) \ + { \ + rst_section(global_test_result ": " $$0); \ + while ((rc = (getline line < ($$0 ".log"))) != 0) \ + { \ + if (rc < 0) \ + fatal("failed to read from " $$0 ".log"); \ + print line; \ + }; \ + printf "\n"; \ + }; \ + close ($$0 ".trs"); \ + close ($$0 ".log"); \ +}' +# Restructured Text title. +am__rst_title = { sed 's/.*/ & /;h;s/./=/g;p;x;s/ *$$//;p;g' && echo; } +# Solaris 10 'make', and several other traditional 'make' implementations, +# pass "-e" to $(SHELL), and POSIX 2008 even requires this. Work around it +# by disabling -e (using the XSI extension "set +e") if it's set. +am__sh_e_setup = case $$- in *e*) set +e;; esac +# Default flags passed to test drivers. +am__common_driver_flags = \ + --color-tests "$$am__color_tests" \ + --enable-hard-errors "$$am__enable_hard_errors" \ + --expect-failure "$$am__expect_failure" +# To be inserted before the command running the test. Creates the +# directory for the log if needed. Stores in $dir the directory +# containing $f, in $tst the test, in $log the log. Executes the +# developer- defined test setup AM_TESTS_ENVIRONMENT (if any), and +# passes TESTS_ENVIRONMENT. Set up options for the wrapper that +# will run the test scripts (or their associated LOG_COMPILER, if +# thy have one). +am__check_pre = \ +$(am__sh_e_setup); \ +$(am__vpath_adj_setup) $(am__vpath_adj) \ +$(am__tty_colors); \ +srcdir=$(srcdir); export srcdir; \ +case "$@" in \ + */*) am__odir=`echo "./$@" | sed 's|/[^/]*$$||'`;; \ + *) am__odir=.;; \ +esac; \ +test "x$$am__odir" = x"." || test -d "$$am__odir" \ + || $(MKDIR_P) "$$am__odir" || exit $$?; \ +if test -f "./$$f"; then dir=./; \ +elif test -f "$$f"; then dir=; \ +else dir="$(srcdir)/"; fi; \ +tst=$$dir$$f; log='$@'; \ +if test -n '$(DISABLE_HARD_ERRORS)'; then \ + am__enable_hard_errors=no; \ +else \ + am__enable_hard_errors=yes; \ +fi; \ +case " $(XFAIL_TESTS) " in \ + *[\ \ ]$$f[\ \ ]* | *[\ \ ]$$dir$$f[\ \ ]*) \ + am__expect_failure=yes;; \ + *) \ + am__expect_failure=no;; \ +esac; \ +$(AM_TESTS_ENVIRONMENT) $(TESTS_ENVIRONMENT) +# A shell command to get the names of the tests scripts with any registered +# extension removed (i.e., equivalently, the names of the test logs, with +# the '.log' extension removed). The result is saved in the shell variable +# '$bases'. This honors runtime overriding of TESTS and TEST_LOGS. Sadly, +# we cannot use something simpler, involving e.g., "$(TEST_LOGS:.log=)", +# since that might cause problem with VPATH rewrites for suffix-less tests. +# See also 'test-harness-vpath-rewrite.sh' and 'test-trs-basic.sh'. 
+am__set_TESTS_bases = \ + bases='$(TEST_LOGS)'; \ + bases=`for i in $$bases; do echo $$i; done | sed 's/\.log$$//'`; \ + bases=`echo $$bases` +RECHECK_LOGS = $(TEST_LOGS) +AM_RECURSIVE_TARGETS = check recheck +TEST_SUITE_LOG = test-suite.log +LOG_DRIVER = $(SHELL) $(top_srcdir)/bin/test-driver +LOG_COMPILE = $(LOG_COMPILER) $(AM_LOG_FLAGS) $(LOG_FLAGS) +am__set_b = \ + case '$@' in \ + */*) \ + case '$*' in \ + */*) b='$*';; \ + *) b=`echo '$@' | sed 's/\.log$$//'`; \ + esac;; \ + *) \ + b='$*';; \ + esac +am__test_logs1 = $(TESTS:=.log) +am__test_logs2 = $(am__test_logs1:@EXEEXT@.log=.log) +TEST_LOGS = $(am__test_logs2:.sh.log=.log) +SH_LOG_DRIVER = $(SHELL) $(top_srcdir)/bin/test-driver +SH_LOG_COMPILE = $(SH_LOG_COMPILER) $(AM_SH_LOG_FLAGS) $(SH_LOG_FLAGS) +DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) +ACLOCAL = @ACLOCAL@ +ADD_PARALLEL_FILES = @ADD_PARALLEL_FILES@ +AMTAR = @AMTAR@ + +# H5_CFLAGS holds flags that should be used when building hdf5, +# but which should not be exported to h5cc for building other programs. +# AM_CFLAGS is an automake construct which should be used by Makefiles +# instead of CFLAGS, as CFLAGS is reserved solely for the user to define. +# This applies to FCFLAGS, CXXFLAGS, CPPFLAGS, and LDFLAGS as well. +AM_CFLAGS = @AM_CFLAGS@ @H5_CFLAGS@ + +# Include src directory +AM_CPPFLAGS = @AM_CPPFLAGS@ @H5_CPPFLAGS@ -I$(top_srcdir)/src \ + -I$(top_srcdir)/tools/lib +AM_CXXFLAGS = @AM_CXXFLAGS@ @H5_CXXFLAGS@ +AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ +AM_FCFLAGS = @AM_FCFLAGS@ @H5_FCFLAGS@ +AM_LDFLAGS = @AM_LDFLAGS@ @H5_LDFLAGS@ +AM_MAKEFLAGS = @AM_MAKEFLAGS@ +AR = @AR@ +AUTOCONF = @AUTOCONF@ +AUTOHEADER = @AUTOHEADER@ +AUTOMAKE = @AUTOMAKE@ +AWK = @AWK@ +BYTESEX = @BYTESEX@ +CC = @CC@ +CCDEPMODE = @CCDEPMODE@ +CC_VERSION = @CC_VERSION@ +CFLAGS = @CFLAGS@ +CLEARFILEBUF = @CLEARFILEBUF@ +CODESTACK = @CODESTACK@ +CONFIG_DATE = @CONFIG_DATE@ +CONFIG_MODE = @CONFIG_MODE@ +CONFIG_USER = @CONFIG_USER@ +CPP = @CPP@ +CPPFLAGS = @CPPFLAGS@ +CXX = @CXX@ +CXXCPP = @CXXCPP@ +CXXDEPMODE = @CXXDEPMODE@ +CXXFLAGS = @CXXFLAGS@ +CXX_VERSION = @CXX_VERSION@ +CYGPATH_W = @CYGPATH_W@ +DEBUG_PKG = @DEBUG_PKG@ +DEFAULT_API_VERSION = @DEFAULT_API_VERSION@ +DEFS = @DEFS@ +DEPDIR = @DEPDIR@ +DEPRECATED_SYMBOLS = @DEPRECATED_SYMBOLS@ +DIRECT_VFD = @DIRECT_VFD@ +DLLTOOL = @DLLTOOL@ +DSYMUTIL = @DSYMUTIL@ +DUMPBIN = @DUMPBIN@ +ECHO_C = @ECHO_C@ +ECHO_N = @ECHO_N@ +ECHO_T = @ECHO_T@ +EGREP = @EGREP@ +EXEEXT = @EXEEXT@ +EXTERNAL_FILTERS = @EXTERNAL_FILTERS@ + +# Make sure that these variables are exported to the Makefiles +F9XMODEXT = @F9XMODEXT@ +F9XMODFLAG = @F9XMODFLAG@ +F9XSUFFIXFLAG = @F9XSUFFIXFLAG@ +FC = @FC@ +FC2003 = @FC2003@ +FCFLAGS = @FCFLAGS@ +FCFLAGS_f90 = @FCFLAGS_f90@ +FCLIBS = @FCLIBS@ +FC_VERSION = @FC_VERSION@ +FGREP = @FGREP@ +FSEARCH_DIRS = @FSEARCH_DIRS@ +GREP = @GREP@ +H5_CFLAGS = @H5_CFLAGS@ +H5_CPPFLAGS = @H5_CPPFLAGS@ +H5_CXXFLAGS = @H5_CXXFLAGS@ +H5_FCFLAGS = @H5_FCFLAGS@ +H5_FORTRAN_SHARED = @H5_FORTRAN_SHARED@ +H5_LDFLAGS = @H5_LDFLAGS@ +H5_VERSION = @H5_VERSION@ +HADDR_T = @HADDR_T@ +HAVE_DMALLOC = @HAVE_DMALLOC@ +HAVE_FORTRAN_2003 = @HAVE_FORTRAN_2003@ +HAVE_PTHREAD = @HAVE_PTHREAD@ +HDF5_HL = @HDF5_HL@ +HDF5_INTERFACES = @HDF5_INTERFACES@ +HDF_CXX = @HDF_CXX@ +HDF_FORTRAN = @HDF_FORTRAN@ +HDF_FORTRAN2003 = @HDF_FORTRAN2003@ +HID_T = @HID_T@ +HL = @HL@ +HL_FOR = @HL_FOR@ +HSIZE_T = @HSIZE_T@ +HSSIZE_T = @HSSIZE_T@ +INSTALL = @INSTALL@ +INSTALL_DATA = @INSTALL_DATA@ +INSTALL_PROGRAM = @INSTALL_PROGRAM@ +INSTALL_SCRIPT = @INSTALL_SCRIPT@ 
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +INSTRUMENT = @INSTRUMENT@ +INSTRUMENT_LIBRARY = @INSTRUMENT_LIBRARY@ +LD = @LD@ +LDFLAGS = @LDFLAGS@ +LIBOBJS = @LIBOBJS@ +LIBS = @LIBS@ +LIBTOOL = @LIBTOOL@ +LIPO = @LIPO@ +LL_PATH = @LL_PATH@ +LN_S = @LN_S@ +LTLIBOBJS = @LTLIBOBJS@ +LT_STATIC_EXEC = @LT_STATIC_EXEC@ +MAINT = @MAINT@ +MAKEINFO = @MAKEINFO@ +MANIFEST_TOOL = @MANIFEST_TOOL@ +MKDIR_P = @MKDIR_P@ +MPE = @MPE@ +NM = @NM@ +NMEDIT = @NMEDIT@ +OBJDUMP = @OBJDUMP@ +OBJECT_NAMELEN_DEFAULT_F = @OBJECT_NAMELEN_DEFAULT_F@ +OBJEXT = @OBJEXT@ +OTOOL = @OTOOL@ +OTOOL64 = @OTOOL64@ +PACKAGE = @PACKAGE@ +PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ +PACKAGE_NAME = @PACKAGE_NAME@ +PACKAGE_STRING = @PACKAGE_STRING@ +PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +PARALLEL = @PARALLEL@ +PATH_SEPARATOR = @PATH_SEPARATOR@ +PERL = @PERL@ +RANLIB = @RANLIB@ +ROOT = @ROOT@ +RUNPARALLEL = @RUNPARALLEL@ +RUNSERIAL = @RUNSERIAL@ +R_INTEGER = @R_INTEGER@ +R_LARGE = @R_LARGE@ +SEARCH = @SEARCH@ +SED = @SED@ +SET_MAKE = @SET_MAKE@ +SHELL = @SHELL@ +SIZE_T = @SIZE_T@ +STATIC_EXEC = @STATIC_EXEC@ +STATIC_SHARED = @STATIC_SHARED@ +STRICT_FORMAT_CHECKS = @STRICT_FORMAT_CHECKS@ +STRIP = @STRIP@ +TESTPARALLEL = @TESTPARALLEL@ +THREADSAFE = @THREADSAFE@ +TIME = @TIME@ +TR = @TR@ +TRACE_API = @TRACE_API@ +UNAME_INFO = @UNAME_INFO@ +USE_FILTER_DEFLATE = @USE_FILTER_DEFLATE@ +USE_FILTER_SZIP = @USE_FILTER_SZIP@ +USINGMEMCHECKER = @USINGMEMCHECKER@ +VERSION = @VERSION@ +WORDS_BIGENDIAN = @WORDS_BIGENDIAN@ +abs_builddir = @abs_builddir@ +abs_srcdir = @abs_srcdir@ +abs_top_builddir = @abs_top_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +ac_ct_AR = @ac_ct_AR@ +ac_ct_CC = @ac_ct_CC@ +ac_ct_CXX = @ac_ct_CXX@ +ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ +ac_ct_FC = @ac_ct_FC@ +am__include = @am__include@ +am__leading_dot = @am__leading_dot@ +am__quote = @am__quote@ +am__tar = @am__tar@ +am__untar = @am__untar@ +bindir = @bindir@ +build = @build@ +build_alias = @build_alias@ +build_cpu = @build_cpu@ +build_os = @build_os@ +build_vendor = @build_vendor@ +builddir = @builddir@ +datadir = @datadir@ +datarootdir = @datarootdir@ + +# Install directories that automake doesn't know about +docdir = $(exec_prefix)/doc +dvidir = @dvidir@ +enable_shared = @enable_shared@ +enable_static = @enable_static@ +exec_prefix = @exec_prefix@ +host = @host@ +host_alias = @host_alias@ +host_cpu = @host_cpu@ +host_os = @host_os@ +host_vendor = @host_vendor@ +htmldir = @htmldir@ +includedir = @includedir@ +infodir = @infodir@ +install_sh = @install_sh@ +libdir = @libdir@ +libexecdir = @libexecdir@ +localedir = @localedir@ +localstatedir = @localstatedir@ +mandir = @mandir@ +mkdir_p = @mkdir_p@ +oldincludedir = @oldincludedir@ +pdfdir = @pdfdir@ +prefix = @prefix@ +program_transform_name = @program_transform_name@ +psdir = @psdir@ +sbindir = @sbindir@ +sharedstatedir = @sharedstatedir@ +srcdir = @srcdir@ +sysconfdir = @sysconfdir@ +target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ +top_builddir = @top_builddir@ +top_srcdir = @top_srcdir@ + +# Shell commands used in Makefiles +RM = rm -f +CP = cp + +# Some machines need a command to run executables; this is that command +# so that our tests will run. +# We use RUNEXEC instead of RUNSERIAL directly because it may be that +# some tests need to be run with a different command. Older versions +# of the makefiles used the command +# $(LIBTOOL) --mode=execute +# in some directories, for instance. 
+RUNEXEC = $(RUNSERIAL) + +# Libraries to link to while building +LIBHDF5 = $(top_builddir)/src/libhdf5.la +LIBH5TEST = $(top_builddir)/test/libh5test.la +LIBH5F = $(top_builddir)/fortran/src/libhdf5_fortran.la +LIBH5FTEST = $(top_builddir)/fortran/test/libh5test_fortran.la +LIBH5CPP = $(top_builddir)/c++/src/libhdf5_cpp.la +LIBH5TOOLS = $(top_builddir)/tools/lib/libh5tools.la +LIBH5_HL = $(top_builddir)/hl/src/libhdf5_hl.la +LIBH5F_HL = $(top_builddir)/hl/fortran/src/libhdf5hl_fortran.la +LIBH5CPP_HL = $(top_builddir)/hl/c++/src/libhdf5_hl_cpp.la + +# Note that in svn revision 19400 the '/' after DESTDIR in H5* variables below +# has been removed. According to the official description of DESTDIR by Gnu at +# http://www.gnu.org/prep/standards/html_node/DESTDIR.html, DESTDIR is +# prepended to the normal and complete install path that it precedes for the +# purpose of installing in a temporary directory which is useful for building +# rpms and other packages. The '/' after ${DESTDIR} will be followed by another +# '/' at the beginning of the normal install path. When DESTDIR is empty the +# path then begins with '//', which is incorrect and causes problems at least for +# Cygwin. + +# Scripts used to build examples +# If only shared libraries have been installed, have h5cc build examples with +# shared libraries instead of static libraries +H5CC = ${DESTDIR}$(bindir)/h5cc +H5CC_PP = ${DESTDIR}$(bindir)/h5pcc +H5FC = ${DESTDIR}$(bindir)/h5fc +H5FC_PP = ${DESTDIR}$(bindir)/h5pfc +H5CPP = ${DESTDIR}$(bindir)/h5c++ +ACLOCAL_AMFLAGS = "-I m4" + +# The trace script; this is used on source files from the C library to +# insert tracing macros. +TRACE = perl $(top_srcdir)/bin/trace + +# .chkexe files are used to mark tests that have run successfully. +# .chklog files are output from those tests. +# *.clog and *.clog2 are from the MPE option. + +# Tell automake to clean h5redeploy script +CHECK_CLEANFILES = *.chkexe *.chklog *.clog *.clog2 *.h5 + +#test script and program +TEST_PROG = h5fc_gentest +TEST_SCRIPT = testh5fc.sh +check_SCRIPTS = $(TEST_SCRIPT) +SCRIPT_DEPEND = h5format_convert$(EXEEXT) + +# Add h5format_convert specific linker flags here +h5format_convert_LDFLAGS = $(LT_STATIC_EXEC) $(AM_LDFLAGS) + +# These were generated by configure. Remove them only when distclean. +DISTCLEANFILES = testh5fc.sh + +# All programs rely on hdf5 library and h5tools library +LDADD = $(LIBH5TOOLS) $(LIBHDF5) + +# Automake needs to be taught how to build lib, progs, and tests targets. +# These will be filled in automatically for the most part (e.g., +# lib_LIBRARIES are built for lib target), but EXTRA_LIB, EXTRA_PROG, and +# EXTRA_TEST variables are supplied to allow the user to force targets to +# be built at certain times. 
+LIB = $(lib_LIBRARIES) $(lib_LTLIBRARIES) $(noinst_LIBRARIES) \ + $(noinst_LTLIBRARIES) $(check_LIBRARIES) $(check_LTLIBRARIES) $(EXTRA_LIB) + +PROGS = $(bin_PROGRAMS) $(bin_SCRIPTS) $(noinst_PROGRAMS) $(noinst_SCRIPTS) \ + $(EXTRA_PROG) + +chk_TESTS = $(check_PROGRAMS) $(check_SCRIPTS) $(EXTRA_TEST) +TEST_EXTENSIONS = .sh +SH_LOG_COMPILER = $(SHELL) +AM_SH_LOG_FLAGS = +TEST_PROG_CHKEXE = $(TEST_PROG:=.chkexe_) +TEST_PROG_PARA_CHKEXE = $(TEST_PROG_PARA:=.chkexe_) +TEST_SCRIPT_CHKSH = $(TEST_SCRIPT:=.chkexe_) +TEST_SCRIPT_PARA_CHKSH = $(TEST_SCRIPT_PARA:=.chkexe_) +all: all-am + +.SUFFIXES: +.SUFFIXES: .c .lo .log .o .obj .sh .sh$(EXEEXT) .trs +$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir)/config/commence.am $(top_srcdir)/config/conclude.am $(am__configure_deps) + @for dep in $?; do \ + case '$(am__configure_deps)' in \ + *$$dep*) \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ + exit 1;; \ + esac; \ + done; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign tools/h5format_convert/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign tools/h5format_convert/Makefile +.PRECIOUS: Makefile +Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status + @case '$?' in \ + *config.status*) \ + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ + *) \ + echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ + cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ + esac; +$(top_srcdir)/config/commence.am $(top_srcdir)/config/conclude.am: + +$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh + +$(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): +testh5fc.sh: $(top_builddir)/config.status $(srcdir)/testh5fc.sh.in + cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ +install-binPROGRAMS: $(bin_PROGRAMS) + @$(NORMAL_INSTALL) + @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \ + fi; \ + for p in $$list; do echo "$$p $$p"; done | \ + sed 's/$(EXEEXT)$$//' | \ + while read p p1; do if test -f $$p \ + || test -f $$p1 \ + ; then echo "$$p"; echo "$$p"; else :; fi; \ + done | \ + sed -e 'p;s,.*/,,;n;h' \ + -e 's|.*|.|' \ + -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ + sed 'N;N;N;s,\n, ,g' | \ + $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ + { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ + if ($$2 == $$4) files[d] = files[d] " " $$1; \ + else { print "f", $$3 "/" $$4, $$1; } } \ + END { for (d in files) print "f", d, files[d] }' | \ + while read type dir files; do \ + if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ + test -z "$$files" || { \ + echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \ + $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ + } \ + ; done + +uninstall-binPROGRAMS: + @$(NORMAL_UNINSTALL) + 
@list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ + files=`for p in $$list; do echo "$$p"; done | \ + sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ + -e 's/$$/$(EXEEXT)/' \ + `; \ + test -n "$$list" || exit 0; \ + echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ + cd "$(DESTDIR)$(bindir)" && rm -f $$files + +clean-binPROGRAMS: + @list='$(bin_PROGRAMS)'; test -n "$$list" || exit 0; \ + echo " rm -f" $$list; \ + rm -f $$list || exit $$?; \ + test -n "$(EXEEXT)" || exit 0; \ + list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ + echo " rm -f" $$list; \ + rm -f $$list + +clean-checkPROGRAMS: + @list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \ + echo " rm -f" $$list; \ + rm -f $$list || exit $$?; \ + test -n "$(EXEEXT)" || exit 0; \ + list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ + echo " rm -f" $$list; \ + rm -f $$list + +h5fc_chk_idx$(EXEEXT): $(h5fc_chk_idx_OBJECTS) $(h5fc_chk_idx_DEPENDENCIES) $(EXTRA_h5fc_chk_idx_DEPENDENCIES) + @rm -f h5fc_chk_idx$(EXEEXT) + $(AM_V_CCLD)$(LINK) $(h5fc_chk_idx_OBJECTS) $(h5fc_chk_idx_LDADD) $(LIBS) + +h5fc_gentest$(EXEEXT): $(h5fc_gentest_OBJECTS) $(h5fc_gentest_DEPENDENCIES) $(EXTRA_h5fc_gentest_DEPENDENCIES) + @rm -f h5fc_gentest$(EXEEXT) + $(AM_V_CCLD)$(LINK) $(h5fc_gentest_OBJECTS) $(h5fc_gentest_LDADD) $(LIBS) + +h5format_convert$(EXEEXT): $(h5format_convert_OBJECTS) $(h5format_convert_DEPENDENCIES) $(EXTRA_h5format_convert_DEPENDENCIES) + @rm -f h5format_convert$(EXEEXT) + $(AM_V_CCLD)$(h5format_convert_LINK) $(h5format_convert_OBJECTS) $(h5format_convert_LDADD) $(LIBS) + +mostlyclean-compile: + -rm -f *.$(OBJEXT) + +distclean-compile: + -rm -f *.tab.c + +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/h5fc_chk_idx.Po@am__quote@ +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/h5fc_gentest.Po@am__quote@ +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/h5format_convert.Po@am__quote@ + +.c.o: +@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< +@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po +@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ $< + +.c.obj: +@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` +@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po +@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ `$(CYGPATH_W) '$<'` + +.c.lo: +@am__fastdepCC_TRUE@ $(AM_V_CC)$(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< +@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo +@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LTCOMPILE) -c -o $@ $< + +mostlyclean-libtool: + -rm -f *.lo + +clean-libtool: + -rm -rf .libs _libs + +ID: $(am__tagged_files) + $(am__define_uniq_tagged_files); mkid -fID $$unique +tags: tags-am +TAGS: tags + +tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + set x; \ + here=`pwd`; \ + 
$(am__define_uniq_tagged_files); \ + shift; \ + if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ + test -n "$$unique" || unique=$$empty_fix; \ + if test $$# -gt 0; then \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + "$$@" $$unique; \ + else \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + $$unique; \ + fi; \ + fi +ctags: ctags-am + +CTAGS: ctags +ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + $(am__define_uniq_tagged_files); \ + test -z "$(CTAGS_ARGS)$$unique" \ + || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ + $$unique + +GTAGS: + here=`$(am__cd) $(top_builddir) && pwd` \ + && $(am__cd) $(top_srcdir) \ + && gtags -i $(GTAGS_ARGS) "$$here" +cscopelist: cscopelist-am + +cscopelist-am: $(am__tagged_files) + list='$(am__tagged_files)'; \ + case "$(srcdir)" in \ + [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ + *) sdir=$(subdir)/$(srcdir) ;; \ + esac; \ + for i in $$list; do \ + if test -f "$$i"; then \ + echo "$(subdir)/$$i"; \ + else \ + echo "$$sdir/$$i"; \ + fi; \ + done >> $(top_builddir)/cscope.files + +distclean-tags: + -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags + +# Recover from deleted '.trs' file; this should ensure that +# "rm -f foo.log; make foo.trs" re-run 'foo.test', and re-create +# both 'foo.log' and 'foo.trs'. Break the recipe in two subshells +# to avoid problems with "make -n". +.log.trs: + rm -f $< $@ + $(MAKE) $(AM_MAKEFLAGS) $< + +# Leading 'am--fnord' is there to ensure the list of targets does not +# expand to empty, as could happen e.g. with make check TESTS=''. +am--fnord $(TEST_LOGS) $(TEST_LOGS:.log=.trs): $(am__force_recheck) +am--force-recheck: + @: + +$(TEST_SUITE_LOG): $(TEST_LOGS) + @$(am__set_TESTS_bases); \ + am__f_ok () { test -f "$$1" && test -r "$$1"; }; \ + redo_bases=`for i in $$bases; do \ + am__f_ok $$i.trs && am__f_ok $$i.log || echo $$i; \ + done`; \ + if test -n "$$redo_bases"; then \ + redo_logs=`for i in $$redo_bases; do echo $$i.log; done`; \ + redo_results=`for i in $$redo_bases; do echo $$i.trs; done`; \ + if $(am__make_dryrun); then :; else \ + rm -f $$redo_logs && rm -f $$redo_results || exit 1; \ + fi; \ + fi; \ + if test -n "$$am__remaking_logs"; then \ + echo "fatal: making $(TEST_SUITE_LOG): possible infinite" \ + "recursion detected" >&2; \ + else \ + am__remaking_logs=yes $(MAKE) $(AM_MAKEFLAGS) $$redo_logs; \ + fi; \ + if $(am__make_dryrun); then :; else \ + st=0; \ + errmsg="fatal: making $(TEST_SUITE_LOG): failed to create"; \ + for i in $$redo_bases; do \ + test -f $$i.trs && test -r $$i.trs \ + || { echo "$$errmsg $$i.trs" >&2; st=1; }; \ + test -f $$i.log && test -r $$i.log \ + || { echo "$$errmsg $$i.log" >&2; st=1; }; \ + done; \ + test $$st -eq 0 || exit 1; \ + fi + @$(am__sh_e_setup); $(am__tty_colors); $(am__set_TESTS_bases); \ + ws='[ ]'; \ + results=`for b in $$bases; do echo $$b.trs; done`; \ + test -n "$$results" || results=/dev/null; \ + all=` grep "^$$ws*:test-result:" $$results | wc -l`; \ + pass=` grep "^$$ws*:test-result:$$ws*PASS" $$results | wc -l`; \ + fail=` grep "^$$ws*:test-result:$$ws*FAIL" $$results | wc -l`; \ + skip=` grep "^$$ws*:test-result:$$ws*SKIP" $$results | wc -l`; \ + xfail=`grep "^$$ws*:test-result:$$ws*XFAIL" $$results | wc -l`; \ + xpass=`grep "^$$ws*:test-result:$$ws*XPASS" $$results | wc -l`; \ + error=`grep "^$$ws*:test-result:$$ws*ERROR" $$results | wc -l`; \ + if test `expr $$fail + $$xpass + $$error` -eq 0; then \ + success=true; \ + else \ + success=false; \ + fi; \ + br='==================='; br=$$br$$br$$br$$br; \ + result_count () \ + { 
\ + if test x"$$1" = x"--maybe-color"; then \ + maybe_colorize=yes; \ + elif test x"$$1" = x"--no-color"; then \ + maybe_colorize=no; \ + else \ + echo "$@: invalid 'result_count' usage" >&2; exit 4; \ + fi; \ + shift; \ + desc=$$1 count=$$2; \ + if test $$maybe_colorize = yes && test $$count -gt 0; then \ + color_start=$$3 color_end=$$std; \ + else \ + color_start= color_end=; \ + fi; \ + echo "$${color_start}# $$desc $$count$${color_end}"; \ + }; \ + create_testsuite_report () \ + { \ + result_count $$1 "TOTAL:" $$all "$$brg"; \ + result_count $$1 "PASS: " $$pass "$$grn"; \ + result_count $$1 "SKIP: " $$skip "$$blu"; \ + result_count $$1 "XFAIL:" $$xfail "$$lgn"; \ + result_count $$1 "FAIL: " $$fail "$$red"; \ + result_count $$1 "XPASS:" $$xpass "$$red"; \ + result_count $$1 "ERROR:" $$error "$$mgn"; \ + }; \ + { \ + echo "$(PACKAGE_STRING): $(subdir)/$(TEST_SUITE_LOG)" | \ + $(am__rst_title); \ + create_testsuite_report --no-color; \ + echo; \ + echo ".. contents:: :depth: 2"; \ + echo; \ + for b in $$bases; do echo $$b; done \ + | $(am__create_global_log); \ + } >$(TEST_SUITE_LOG).tmp || exit 1; \ + mv $(TEST_SUITE_LOG).tmp $(TEST_SUITE_LOG); \ + if $$success; then \ + col="$$grn"; \ + else \ + col="$$red"; \ + test x"$$VERBOSE" = x || cat $(TEST_SUITE_LOG); \ + fi; \ + echo "$${col}$$br$${std}"; \ + echo "$${col}Testsuite summary for $(PACKAGE_STRING)$${std}"; \ + echo "$${col}$$br$${std}"; \ + create_testsuite_report --maybe-color; \ + echo "$$col$$br$$std"; \ + if $$success; then :; else \ + echo "$${col}See $(subdir)/$(TEST_SUITE_LOG)$${std}"; \ + if test -n "$(PACKAGE_BUGREPORT)"; then \ + echo "$${col}Please report to $(PACKAGE_BUGREPORT)$${std}"; \ + fi; \ + echo "$$col$$br$$std"; \ + fi; \ + $$success || exit 1 +recheck: all $(check_PROGRAMS) $(check_SCRIPTS) + @test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG) + @set +e; $(am__set_TESTS_bases); \ + bases=`for i in $$bases; do echo $$i; done \ + | $(am__list_recheck_tests)` || exit 1; \ + log_list=`for i in $$bases; do echo $$i.log; done`; \ + log_list=`echo $$log_list`; \ + $(MAKE) $(AM_MAKEFLAGS) $(TEST_SUITE_LOG) \ + am__force_recheck=am--force-recheck \ + TEST_LOGS="$$log_list"; \ + exit $$? 
+h5fc_gentest.log: h5fc_gentest$(EXEEXT) + @p='h5fc_gentest$(EXEEXT)'; \ + b='h5fc_gentest'; \ + $(am__check_pre) $(LOG_DRIVER) --test-name "$$f" \ + --log-file $$b.log --trs-file $$b.trs \ + $(am__common_driver_flags) $(AM_LOG_DRIVER_FLAGS) $(LOG_DRIVER_FLAGS) -- $(LOG_COMPILE) \ + "$$tst" $(AM_TESTS_FD_REDIRECT) +.sh.log: + @p='$<'; \ + $(am__set_b); \ + $(am__check_pre) $(SH_LOG_DRIVER) --test-name "$$f" \ + --log-file $$b.log --trs-file $$b.trs \ + $(am__common_driver_flags) $(AM_SH_LOG_DRIVER_FLAGS) $(SH_LOG_DRIVER_FLAGS) -- $(SH_LOG_COMPILE) \ + "$$tst" $(AM_TESTS_FD_REDIRECT) +@am__EXEEXT_TRUE@.sh$(EXEEXT).log: +@am__EXEEXT_TRUE@ @p='$<'; \ +@am__EXEEXT_TRUE@ $(am__set_b); \ +@am__EXEEXT_TRUE@ $(am__check_pre) $(SH_LOG_DRIVER) --test-name "$$f" \ +@am__EXEEXT_TRUE@ --log-file $$b.log --trs-file $$b.trs \ +@am__EXEEXT_TRUE@ $(am__common_driver_flags) $(AM_SH_LOG_DRIVER_FLAGS) $(SH_LOG_DRIVER_FLAGS) -- $(SH_LOG_COMPILE) \ +@am__EXEEXT_TRUE@ "$$tst" $(AM_TESTS_FD_REDIRECT) + +distdir: $(DISTFILES) + @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + list='$(DISTFILES)'; \ + dist_files=`for file in $$list; do echo $$file; done | \ + sed -e "s|^$$srcdirstrip/||;t" \ + -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ + case $$dist_files in \ + */*) $(MKDIR_P) `echo "$$dist_files" | \ + sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ + sort -u` ;; \ + esac; \ + for file in $$dist_files; do \ + if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ + if test -d $$d/$$file; then \ + dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ + else \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ + || exit 1; \ + fi; \ + done +check-am: all-am + $(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS) $(check_SCRIPTS) + $(MAKE) $(AM_MAKEFLAGS) check-TESTS +check: check-am +all-am: Makefile $(PROGRAMS) all-local +installdirs: + for dir in "$(DESTDIR)$(bindir)"; do \ + test -z "$$dir" || $(MKDIR_P) "$$dir"; \ + done +install: install-am +install-exec: install-exec-am +install-data: install-data-am +uninstall: uninstall-am + +install-am: all-am + @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am + +installcheck: installcheck-am +install-strip: + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi +mostlyclean-generic: + -test -z "$(TEST_LOGS)" || rm -f $(TEST_LOGS) + -test -z "$(TEST_LOGS:.log=.trs)" || rm -f $(TEST_LOGS:.log=.trs) + -test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG) + +clean-generic: + +distclean-generic: + -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . 
= "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) + -test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES) + +maintainer-clean-generic: + @echo "This command is intended for maintainers to use" + @echo "it deletes files that may require special tools to rebuild." +clean: clean-am + +clean-am: clean-binPROGRAMS clean-checkPROGRAMS clean-generic \ + clean-libtool mostlyclean-am + +distclean: distclean-am + -rm -rf ./$(DEPDIR) + -rm -f Makefile +distclean-am: clean-am distclean-compile distclean-generic \ + distclean-tags + +dvi: dvi-am + +dvi-am: + +html: html-am + +html-am: + +info: info-am + +info-am: + +install-data-am: + +install-dvi: install-dvi-am + +install-dvi-am: + +install-exec-am: install-binPROGRAMS + +install-html: install-html-am + +install-html-am: + +install-info: install-info-am + +install-info-am: + +install-man: + +install-pdf: install-pdf-am + +install-pdf-am: + +install-ps: install-ps-am + +install-ps-am: + +installcheck-am: + +maintainer-clean: maintainer-clean-am + -rm -rf ./$(DEPDIR) + -rm -f Makefile +maintainer-clean-am: distclean-am maintainer-clean-generic + +mostlyclean: mostlyclean-am + +mostlyclean-am: mostlyclean-compile mostlyclean-generic \ + mostlyclean-libtool mostlyclean-local + +pdf: pdf-am + +pdf-am: + +ps: ps-am + +ps-am: + +uninstall-am: uninstall-binPROGRAMS + +.MAKE: check-am install-am install-strip + +.PHONY: CTAGS GTAGS TAGS all all-am all-local check check-TESTS \ + check-am clean clean-binPROGRAMS clean-checkPROGRAMS \ + clean-generic clean-libtool cscopelist-am ctags ctags-am \ + distclean distclean-compile distclean-generic \ + distclean-libtool distclean-tags distdir dvi dvi-am html \ + html-am info info-am install install-am install-binPROGRAMS \ + install-data install-data-am install-dvi install-dvi-am \ + install-exec install-exec-am install-html install-html-am \ + install-info install-info-am install-man install-pdf \ + install-pdf-am install-ps install-ps-am install-strip \ + installcheck installcheck-am installdirs maintainer-clean \ + maintainer-clean-generic mostlyclean mostlyclean-compile \ + mostlyclean-generic mostlyclean-libtool mostlyclean-local pdf \ + pdf-am ps ps-am recheck tags tags-am uninstall uninstall-am \ + uninstall-binPROGRAMS + + +# List all build rules defined by HDF5 Makefiles as "PHONY" targets here. +# This tells the Makefiles that these targets are not files to be built but +# commands that should be executed even if a file with the same name already +# exists. +.PHONY: build-check-clean build-check-p build-check-s build-lib build-progs \ + build-tests check-clean check-install check-p check-s check-vfd \ + install-doc lib progs tests uninstall-doc _exec_check-s _test help + +help: + @$(top_srcdir)/bin/makehelp + +# lib/progs/tests targets recurse into subdirectories. build-* targets +# build files in this directory. +build-lib: $(LIB) +build-progs: $(LIB) $(PROGS) +build-tests: $(LIB) $(PROGS) $(chk_TESTS) + +# General rule for recursive building targets. +# BUILT_SOURCES contain targets that need to be built before anything else +# in the directory (e.g., for Fortran type detection) +lib progs tests check-s check-p :: $(BUILT_SOURCES) + @$(MAKE) $(AM_MAKEFLAGS) build-$@ || exit 1; + @for d in X $(SUBDIRS); do \ + if test $$d != X && test $$d != .; then \ + (set -x; cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \ + fi; \ + done + +# General rule for recursive cleaning targets. Like the rule above, +# but doesn't require building BUILT_SOURCES. 
+check-clean :: + @$(MAKE) $(AM_MAKEFLAGS) build-$@ || exit 1; + @for d in X $(SUBDIRS); do \ + if test $$d != X && test $$d != .; then \ + (set -x; cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \ + fi; \ + done + +# Tell Automake to build tests when the user types `make all' (this is +# not its default behavior). Also build EXTRA_LIB and EXTRA_PROG since +# Automake won't build them automatically, either. +all-local: $(EXTRA_LIB) $(EXTRA_PROG) $(chk_TESTS) + +# make install-doc doesn't do anything outside of doc directory, but +# Makefiles should recognize it. +# UPDATE: docs no longer reside in this build tree, so this target +# is depreciated. +install-doc uninstall-doc: + @echo "Nothing to be done." + +# clean up files generated by tests so they can be re-run. +build-check-clean: + $(RM) -rf $(CHECK_CLEANFILES) + +# run check-clean whenever mostlyclean is run +mostlyclean-local: build-check-clean + +# check-install is just a synonym for installcheck +check-install: installcheck + +# Run each test in order, passing $(TEST_FLAGS) to the program. +# Since tests are done in a shell loop, "make -i" does apply inside it. +# Set HDF5_Make_Ignore to a non-blank string to ignore errors inside the loop. +# The timestamps give a rough idea how much time the tests use. +# +# Note that targets in chk_TESTS (defined above) will be built when the user +# types 'make tests' or 'make check', but only programs in TEST_PROG, +# TEST_PROG_PARA, or TEST_SCRIPT will actually be executed. +check-TESTS: test + +test _test: + @$(MAKE) build-check-s + @$(MAKE) build-check-p + +# Actual execution of check-s. +build-check-s: $(LIB) $(PROGS) $(chk_TESTS) + @if test -n "$(TEST_PROG)$(TEST_SCRIPT)"; then \ + echo "===Serial tests in `echo ${PWD} | sed -e s:.*/::` begin `date`==="; \ + fi + @$(MAKE) $(AM_MAKEFLAGS) _exec_check-s + @if test -n "$(TEST_PROG)$(TEST_SCRIPT)"; then \ + echo "===Serial tests in `echo ${PWD} | sed -e s:.*/::` ended `date`===";\ + fi + +_exec_check-s: $(TEST_PROG_CHKEXE) $(TEST_SCRIPT_CHKSH) + +# The dummy.chkexe here prevents the target from being +# empty if there are no tests in the current directory. +# $${log} is the log file. +# $${tname} is the name of test. 
+$(TEST_PROG_CHKEXE) $(TEST_PROG_PARA_CHKEXE) dummy.chkexe_: + @if test "X$@" != "X.chkexe_" && test "X$@" != "Xdummy.chkexe_"; then \ + tname=$(@:.chkexe_=)$(EXEEXT);\ + log=$(@:.chkexe_=.chklog); \ + echo "============================"; \ + if $(top_srcdir)/bin/newer $(@:.chkexe_=.chkexe) $${tname}; then \ + echo "No need to test $${tname} again."; \ + else \ + echo "============================" > $${log}; \ + if test "X$(FORTRAN_API)" = "Xyes"; then \ + echo "Fortran API: Testing $(HDF5_DRIVER) $${tname} $(TEST_FLAGS)"; \ + echo "Fortran API: $(HDF5_DRIVER) $${tname} $(TEST_FLAGS) Test Log" >> $${log}; \ + elif test "X$(CXX_API)" = "Xyes"; then \ + echo "C++ API: Testing $(HDF5_DRIVER) $${tname} $(TEST_FLAGS)"; \ + echo "C++ API: $(HDF5_DRIVER) $${tname} $(TEST_FLAGS) Test Log" >> $${log};\ + else \ + echo "Testing $(HDF5_DRIVER) $${tname} $(TEST_FLAGS)"; \ + echo "$(HDF5_DRIVER) $${tname} $(TEST_FLAGS) Test Log" >> $${log}; \ + fi; \ + echo "============================" >> $${log}; \ + srcdir="$(srcdir)" \ + $(TIME) $(RUNEXEC) ./$${tname} $(TEST_FLAGS) >> $${log} 2>&1 \ + && touch $(@:.chkexe_=.chkexe) || \ + (test $$HDF5_Make_Ignore && echo "*** Error ignored") || \ + (cat $${log} && false) || exit 1; \ + echo "" >> $${log}; \ + echo "Finished testing $${tname} $(TEST_FLAGS)" >> $${log}; \ + echo "============================" >> $${log}; \ + echo "Finished testing $${tname} $(TEST_FLAGS)"; \ + cat $${log}; \ + fi; \ + fi + +# The dummysh.chkexe here prevents the target from being +# empty if there are no tests in the current directory. +# $${log} is the log file. +# $${tname} is the name of test. +$(TEST_SCRIPT_CHKSH) $(TEST_SCRIPT_PARA_CHKSH) dummysh.chkexe_: + @if test "X$@" != "X.chkexe_" && test "X$@" != "Xdummysh.chkexe_"; then \ + cmd=$(@:.chkexe_=);\ + tname=`basename $$cmd`;\ + chkname=`basename $(@:.chkexe_=.chkexe)`;\ + log=`basename $(@:.chkexe_=.chklog)`; \ + echo "============================"; \ + if $(top_srcdir)/bin/newer $${chkname} $$cmd $(SCRIPT_DEPEND); then \ + echo "No need to test $${tname} again."; \ + else \ + echo "============================" > $${log}; \ + if test "X$(FORTRAN_API)" = "Xyes"; then \ + echo "Fortran API: Testing $${tname} $(TEST_FLAGS)"; \ + echo "Fortran API: $${tname} $(TEST_FLAGS) Test Log" >> $${log}; \ + elif test "X$(CXX_API)" = "Xyes"; then \ + echo "C++ API: Testing $${tname} $(TEST_FLAGS)"; \ + echo "C++ API: $${tname} $(TEST_FLAGS) Test Log" >> $${log}; \ + else \ + echo "Testing $${tname} $(TEST_FLAGS)"; \ + echo "$${tname} $(TEST_FLAGS) Test Log" >> $${log}; \ + fi; \ + echo "============================" >> $${log}; \ + RUNSERIAL="$(RUNSERIAL)" RUNPARALLEL="$(RUNPARALLEL)" \ + srcdir="$(srcdir)" \ + $(TIME) $(SHELL) $$cmd $(TEST_FLAGS) >> $${log} 2>&1 \ + && touch $${chkname} || \ + (test $$HDF5_Make_Ignore && echo "*** Error ignored") || \ + (cat $${log} && false) || exit 1; \ + echo "" >> $${log}; \ + echo "Finished testing $${tname} $(TEST_FLAGS)" >> $${log}; \ + echo "============================" >> $${log}; \ + echo "Finished testing $${tname} $(TEST_FLAGS)"; \ + cat $${log}; \ + fi; \ + echo "============================"; \ + fi + +# Actual execution of check-p. 
+build-check-p: $(LIB) $(PROGS) $(chk_TESTS) + @if test -n "$(TEST_PROG_PARA)$(TEST_SCRIPT_PARA)"; then \ + echo "===Parallel tests in `echo ${PWD} | sed -e s:.*/::` begin `date`==="; \ + fi + @if test -n "$(TEST_PROG_PARA)"; then \ + echo "**** Hint ****"; \ + echo "Parallel test files reside in the current directory" \ + "by default."; \ + echo "Set HDF5_PARAPREFIX to use another directory. E.g.,"; \ + echo " HDF5_PARAPREFIX=/PFS/user/me"; \ + echo " export HDF5_PARAPREFIX"; \ + echo " make check"; \ + echo "**** end of Hint ****"; \ + fi + @for test in $(TEST_PROG_PARA) dummy; do \ + if test $$test != dummy; then \ + $(MAKE) $(AM_MAKEFLAGS) $$test.chkexe_ \ + RUNEXEC="$(RUNPARALLEL)" || exit 1; \ + fi; \ + done + @for test in $(TEST_SCRIPT_PARA) dummy; do \ + if test $$test != dummy; then \ + $(MAKE) $(AM_MAKEFLAGS) $$test.chkexe_ || exit 1; \ + fi; \ + done + @if test -n "$(TEST_PROG_PARA)$(TEST_SCRIPT_PARA)"; then \ + echo "===Parallel tests in `echo ${PWD} | sed -e s:.*/::` ended `date`===";\ + fi + +# Run test with different Virtual File Driver +check-vfd: $(LIB) $(PROGS) $(chk_TESTS) + @for vfd in $(VFD_LIST) dummy; do \ + if test $$vfd != dummy; then \ + echo "============================"; \ + echo "Testing Virtual File Driver $$vfd"; \ + echo "============================"; \ + $(MAKE) $(AM_MAKEFLAGS) check-clean || exit 1; \ + HDF5_DRIVER=$$vfd $(MAKE) $(AM_MAKEFLAGS) check || exit 1; \ + fi; \ + done + +# Tell versions [3.59,3.63) of GNU make to not export all variables. +# Otherwise a system limit (for SysV at least) may be exceeded. +.NOEXPORT: diff --git a/tools/h5format_convert/h5fc_chk_idx.c b/tools/h5format_convert/h5fc_chk_idx.c new file mode 100644 index 0000000..ed3d949 --- /dev/null +++ b/tools/h5format_convert/h5fc_chk_idx.c @@ -0,0 +1,101 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * A program to verify that the chunk indexing type of a dataset in a file + * is version 1 B-tree. + * This is to support the testing of the tool "h5format_convert". + */ + +#include "hdf5.h" +#include "H5private.h" +#include "h5tools.h" + +static void usage(void); + +static void +usage(void) +{ + HDfprintf(stdout, "Usage: h5fc_chk_idx file_name dataset_pathname\n"); +} /* usage() */ + +/*------------------------------------------------------------------------- + * Function: main + * + * Purpose: To check that the chunk indexing type for the dataset in + * the file is version 1 B-tree. 
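+ *
+ *		Example invocation (illustrative only; the dataset path comes
+ *		from the generated test file h5fc_old.h5, and the test script
+ *		may drive this program differently):
+ *
+ *		    ./h5fc_chk_idx h5fc_old.h5 /GROUP/DSET_BT1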
+ *
+ * Return:	0 -- the indexing type is version 1 B-tree
+ *		1 -- otherwise
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+main(int argc, char *argv[])
+{
+    char *fname = NULL;
+    char *dname = NULL;
+    hid_t fid = -1;
+    hid_t did = -1;
+    H5D_chunk_index_t idx_type;
+
+    /* h5fc_chk_idx fname dname */
+    if(argc != 3) {
+	usage();
+	exit(EXIT_FAILURE);
+    }
+
+    /* Duplicate the file name & dataset name */
+    fname = HDstrdup(argv[1]);
+    dname = HDstrdup(argv[2]);
+
+    /* Try opening the file */
+    if((fid = h5tools_fopen(fname, H5F_ACC_RDONLY, H5P_DEFAULT, NULL, NULL, (size_t)0)) < 0) {
+	HDfprintf(stderr, "h5fc_chk_idx: unable to open the file\n");
+	return EXIT_FAILURE;
+    }
+
+    /* Open the dataset */
+    if((did = H5Dopen(fid, dname, H5P_DEFAULT)) < 0) {
+	HDfprintf(stderr, "h5fc_chk_idx: unable to open the dataset\n");
+	exit(EXIT_FAILURE);
+    }
+
+    /* Get the dataset's chunk indexing type */
+    if(H5Dget_chunk_index_type(did, &idx_type) < 0) {
+	HDfprintf(stderr, "h5fc_chk_idx: unable to get chunk index type for the dataset\n");
+	exit(EXIT_FAILURE);
+    }
+
+    /* Close the dataset */
+    if(H5Dclose(did) < 0) {
+	HDfprintf(stderr, "h5fc_chk_idx: unable to close the dataset\n");
+	exit(EXIT_FAILURE);
+    }
+
+    /* Close the file */
+    if(H5Fclose(fid) < 0) {
+	HDfprintf(stderr, "h5fc_chk_idx: unable to close the file\n");
+	return EXIT_FAILURE;
+    }
+
+    /* Return success when the chunk indexing type is version 1 B-tree */
+    if(idx_type == H5D_CHUNK_IDX_BTREE)
+	return(EXIT_SUCCESS);
+    else {
+	HDfprintf(stderr, "Error: chunk indexing type is %d\n", idx_type);
+	return(EXIT_FAILURE);
+    }
+} /* main() */
diff --git a/tools/h5format_convert/h5fc_gentest.c b/tools/h5format_convert/h5fc_gentest.c
new file mode 100644
index 0000000..dd0c3dd
--- /dev/null
+++ b/tools/h5format_convert/h5fc_gentest.c
@@ -0,0 +1,623 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                               *
+ * Copyright by the Board of Trustees of the University of Illinois.         *
+ * All rights reserved.                                                      *
+ *                                                                           *
+ * This file is part of HDF5.  The full HDF5 copyright notice, including     *
+ * terms governing use, modification, and redistribution, is contained in    *
+ * the files COPYING and Copyright.html.  COPYING can be found at the root   *
+ * of the source code distribution tree; Copyright.html can be found at the  *
+ * root level of an installed copy of the electronic HDF5 document set and   *
+ * is linked from the top-level documents page.  It can also be found at     *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html.  If you do not have          *
+ * access to either file, you may request a copy from help@hdfgroup.org.     *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * Generate the binary hdf5 files for the h5format_convert tests.
+ * Usage: just execute the program without any arguments to
+ * generate all the binary hdf5 files.
+ *
+ * If you regenerate the test files (e.g., changing some code,
+ * trying it on a new platform, ...), you need to verify the correctness
+ * of the expected output and update the corresponding *.ddl files.
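+ *
+ * For reference, the files generated here (as named by the macros below) are:
+ *	h5fc_old.h5  -- old-format file with v1 B-tree chunk indexes
+ *	h5fc_new.h5  -- latest-format file with EA/FA/v2 B-tree/implicit indexes
+ *	h5fc_all.h5  -- latest-format file re-opened and extended in old format
+ *	h5fc_edge.h5 -- file with a filtered dataset that skips partial edge chunks
+ * These are presumably the copies kept under testfiles/ and exercised by
+ * testh5fc.sh together with the expected-output *.ddl files.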
+ */ +#include "hdf5.h" + +#define GROUP "GROUP" + +#define OLD_FILE "h5fc_old.h5" +#define DSET_NON_CHUNKED "DSET_NON_CHUNKED" +#define DSET_BT1 "DSET_BT1" +#define DSET_NDATA_BT1 "DSET_NDATA_BT1" + +#define NEW_FILE "h5fc_new.h5" +#define DSET_EA "DSET_EA" +#define DSET_NDATA_EA "DSET_NDATA_EA" +#define DSET_BT2 "DSET_BT2" +#define DSET_NDATA_BT2 "DSET_NDATA_BT2" +#define DSET_FA "DSET_FA" +#define DSET_NDATA_FA "DSET_NDATA_FA" +#define DSET_NONE "DSET_NONE" +#define DSET_NDATA_NONE "DSET_NDATA_NONE" + +#define ALL_FILE "h5fc_all.h5" + +#define EDGE_FILE "h5fc_edge.h5" +#define DSET_EDGE "DSET_EDGE" + +/* + * Function: gen_old() + * + * Create an old format file with: + * 1) 1 non-chunked dataset + * 2) 2 chunked datasets with version 1 B-tree chunk indexing type: with/without data + */ +static void +gen_old(const char *fname) +{ + hid_t fid; /* file id */ + hid_t gid; /* group id */ + hid_t sid; /* space id */ + hid_t dcpl; /* dataset creation property id */ + hid_t did1, did2; /* dataset id */ + hsize_t dims1[1] = {10}; /* dataset dimension */ + hsize_t dims2[2] = {4, 6}; /* dataset dimension */ + hsize_t c_dims[2] = {2, 3}; /* chunk dimension */ + int i; /* local index variable */ + int buf[24]; /* data buffer */ + + /* Create file */ + if((fid = H5Fcreate(fname, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) + goto error; + + /* Create a group */ + if((gid = H5Gcreate2(fid, GROUP, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) + goto error; + + + /* + * Create a non-chunked dataset + */ + + /* Create dataspace */ + if((sid = H5Screate_simple(1, dims1, NULL)) < 0) + goto error; + + /* Create the dataset */ + if((did1 = H5Dcreate2(fid, DSET_NON_CHUNKED, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + + /* + * Create two chunked datasets with version 1 B-tree chunk indexing type + * (one with data, one without data) + */ + + /* Create data */ + for(i = 0; i < 24; i++) + buf[i] = i; + + /* Set chunk */ + if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) + goto error; + if(H5Pset_chunk(dcpl, 2, c_dims) < 0) + goto error; + + /* Create dataspace */ + if((sid = H5Screate_simple(2, dims2, NULL)) < 0) + goto error; + + /* Create the 2 datasets */ + if((did1 = H5Dcreate2(fid, DSET_NDATA_BT1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + if((did2 = H5Dcreate2(gid, DSET_BT1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Write to one dataset */ + if(H5Dwrite(did2, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Pclose(dcpl) < 0) + goto error; + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + if(H5Dclose(did2) < 0) + goto error; + + if(H5Gclose(gid) < 0) + goto error; + if(H5Fclose(fid) < 0) + goto error; + +error: + H5E_BEGIN_TRY { + H5Pclose(dcpl); + H5Sclose(sid); + H5Dclose(did1); + H5Dclose(did2); + H5Gclose(gid); + H5Fclose(fid); + } H5E_END_TRY; + +} /* gen_old() */ + +/* + * Function: gen_new() + * + * Create a new format file with: + * 1) 2 chunked datasets with extensible array chunk indexing type (with/without data) + * 2) 2 chunked datasets with version 2 B-tree chunk indexing type (with/without data) + * 3) 2 chunked datasets with fixed array chunk indexing type (with/without data) + * 4) 2 chunked datasets with implicit array chunk indexing type (with/without data) + */ +static void +gen_new(const char *fname) +{ + hid_t 
fid; /* file id */ + hid_t fapl; /* file access property list */ + hid_t gid; /* group id */ + hid_t sid; /* space id */ + hid_t dcpl; /* dataset creation property id */ + hid_t did1, did2; /* dataset id */ + hsize_t dims2[2] = {4, 6}; /* dataset dimension */ + hsize_t max_dims[2]; /* maximum dataset dimension */ + hsize_t c_dims[2] = {2, 3}; /* chunk dimension */ + int i; /* local index variable */ + int buf[24]; /* data buffer */ + + /* Create a new format file */ + if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0) + goto error; + if(H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) + goto error; + + if((fid = H5Fcreate(fname, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) + goto error; + + /* Create a group */ + if((gid = H5Gcreate2(fid, GROUP, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) + goto error; + + /* Set chunk */ + if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) + goto error; + if(H5Pset_chunk(dcpl, 2, c_dims) < 0) + goto error; + + /* + * Create 2 chunked datasets with extensible array chunk indexing type + * (one with data; one without data) + */ + + /* Create dataspace */ + max_dims[0] = 10; + max_dims[1] = H5S_UNLIMITED; + if((sid = H5Screate_simple(2, dims2, max_dims)) < 0) + goto error; + + /* Create the 2 datasets */ + if((did1 = H5Dcreate2(gid, DSET_NDATA_EA, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + if((did2 = H5Dcreate2(fid, DSET_EA, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Create data */ + for(i = 0; i < 24; i++) + buf[i] = i; + + /* Write to one dataset */ + if(H5Dwrite(did2, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + if(H5Dclose(did2) < 0) + goto error; + + + /* + * Create 2 chunked datasets with version 2 B-tree chunk indexing type + * (one with data; one without data) + */ + + /* Create dataspace */ + max_dims[0] = 10; + max_dims[0] = H5S_UNLIMITED; + max_dims[1] = H5S_UNLIMITED; + if((sid = H5Screate_simple(2, dims2, max_dims)) < 0) + goto error; + + /* Create the 2 datasets */ + if((did1 = H5Dcreate2(fid, DSET_NDATA_BT2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + if((did2 = H5Dcreate2(gid, DSET_BT2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Write to one dataset */ + if(H5Dwrite(did2, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + if(H5Dclose(did2) < 0) + goto error; + + /* + * Create 2 chunked datasets with fixed array chunk indexing type + * (one with data; one without data) + */ + + /* Create dataspace */ + max_dims[0] = 20; + max_dims[1] = 10; + if((sid = H5Screate_simple(2, dims2, max_dims)) < 0) + goto error; + + /* Create the datasets */ + if((did1 = H5Dcreate2(fid, DSET_FA, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + if((did2 = H5Dcreate2(gid, DSET_NDATA_FA, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Write to the dataset */ + if(H5Dwrite(did1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + if(H5Dclose(did2) < 0) + goto error; + + + /* + * Create 2 chunked datasets with implicit chunk indexing type + * (one with data; one without data) + */ + + /* Create dataspace */ + if((sid 
= H5Screate_simple(2, dims2, NULL)) < 0) + goto error; + + /* Set early allocation */ + if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0) + goto error; + + /* Create the 2 datasets */ + if((did1 = H5Dcreate2(fid, DSET_NONE, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + if((did2 = H5Dcreate2(gid, DSET_NDATA_NONE, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Write to one dataset */ + if(H5Dwrite(did1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Dclose(did1) < 0) + goto error; + if(H5Dclose(did2) < 0) + goto error; + if(H5Sclose(sid) < 0) + goto error; + + if(H5Pclose(dcpl) < 0) + goto error; + if(H5Gclose(gid) < 0) + goto error; + if(H5Fclose(fid) < 0) + goto error; + +error: + H5E_BEGIN_TRY { + H5Pclose(dcpl); + H5Sclose(sid); + H5Dclose(did1); + H5Dclose(did2); + H5Gclose(gid); + H5Fclose(fid); + H5Pclose(fapl); + } H5E_END_TRY; + +} /* gen_new() */ + +/* + * Function: gen_all() + * + * Create a new format file with: + * 1) 1 chunked dataset with extensible array chunk indexing type (without data) + * 2) 1 chunked dataset with version 2 B-tree chunk indexing type (with data) + * Re-open the file with old format and create: + * 5) 1 non-chunked dataset + * 6) 2 chunked datasets with version 1 B-tree chunk indexing type (with/without data) + */ +static void +gen_all(const char *fname) +{ + hid_t fid; /* file id */ + hid_t fapl; /* file access property list */ + hid_t gid; /* group id */ + hid_t sid; /* space id */ + hid_t dcpl; /* dataset creation property id */ + hid_t did1, did2; /* dataset id */ + hsize_t dims1[1] = {10}; /* dataset dimension */ + hsize_t dims2[2] = {4, 6}; /* dataset dimension */ + hsize_t max_dims[2]; /* maximum dataset dimension */ + hsize_t c_dims[2] = {2, 3}; /* chunk dimension */ + int i; /* local index variable */ + int buf[24]; /* data buffer */ + + /* Create a new format file */ + if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0) + goto error; + if(H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) + goto error; + + if((fid = H5Fcreate(fname, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) + goto error; + + /* Create a group */ + if((gid = H5Gcreate2(fid, GROUP, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) + goto error; + + /* Create data */ + for(i = 0; i < 24; i++) + buf[i] = i; + + /* Set chunk */ + if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) + goto error; + if(H5Pset_chunk(dcpl, 2, c_dims) < 0) + goto error; + + /* + * Create 1 chunked dataset with extensible array chunk indexing type (without data) + */ + + /* Create dataspace */ + max_dims[0] = 10; + max_dims[1] = H5S_UNLIMITED; + if((sid = H5Screate_simple(2, dims2, max_dims)) < 0) + goto error; + + /* Create the dataset */ + if((did1 = H5Dcreate2(fid, DSET_NDATA_EA, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + + /* + * Create 1 chunked dataset with version 2 B-tree chunk indexing type (with data) + */ + + /* Create dataspace */ + max_dims[0] = 10; + max_dims[0] = H5S_UNLIMITED; + max_dims[1] = H5S_UNLIMITED; + if((sid = H5Screate_simple(2, dims2, max_dims)) < 0) + goto error; + + /* Create the dataset */ + if((did1 = H5Dcreate2(gid, DSET_BT2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Write to the dataset */ + if(H5Dwrite(did1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + 
if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + + + if(H5Pclose(dcpl) < 0) + goto error; + if(H5Gclose(gid) < 0) + goto error; + if(H5Fclose(fid) < 0) + goto error; + + /* Re-open the file with old format */ + if((fid = H5Fopen(fname, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) + goto error; + + /* Open the group */ + if((gid = H5Gopen2(fid, GROUP, H5P_DEFAULT)) < 0) + goto error; + + /* + * Create 2 datasets with version 1 B-btree chunk indexing type (with/without data) + */ + + /* Set chunk */ + if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) + goto error; + if(H5Pset_chunk(dcpl, 2, c_dims) < 0) + goto error; + + /* Create dataspace */ + max_dims[0] = 10; + max_dims[1] = H5S_UNLIMITED; + if((sid = H5Screate_simple(2, dims2, max_dims)) < 0) + goto error; + + /* Create the datasets */ + if((did1 = H5Dcreate2(fid, DSET_NDATA_BT1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + if((did2 = H5Dcreate2(gid, DSET_BT1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Write to one dataset */ + if(H5Dwrite(did2, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + if(H5Dclose(did2) < 0) + goto error; + + /* + * Create a non-chunked dataset + */ + + /* Create dataspace */ + if((sid = H5Screate_simple(1, dims1, NULL)) < 0) + goto error; + + /* Create the dataset */ + if((did1 = H5Dcreate2(gid, DSET_NON_CHUNKED, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + + if(H5Gclose(gid) < 0) + goto error; + if(H5Fclose(fid) < 0) + goto error; + if(H5Pclose(dcpl) < 0) + goto error; + if(H5Pclose(fapl) < 0) + goto error; + +error: + H5E_BEGIN_TRY { + H5Pclose(dcpl); + H5Sclose(sid); + H5Dclose(did1); + H5Dclose(did2); + H5Gclose(gid); + H5Fclose(fid); + H5Pclose(fapl); + } H5E_END_TRY; + +} /* gen_all() */ + +/* + * Function: gen_edge() + * + * Create a new format file with: + * A dataset: chunked, filtered, H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS enabled + * (i.e. 
the dataset does not filter partial edge chunks) + */ +static void +gen_edge(const char *fname) +{ + hid_t fid; /* file id */ + hid_t fapl; /* file access property list */ + hid_t sid; /* dataspace id */ + hid_t dcpl; /* dataset creation property id */ + hid_t did; /* dataset id */ + hsize_t dims2[2] = {12, 6}; /* Dataset dimensions */ + hsize_t c_dims[2] = {5, 5}; /* Chunk dimensions */ + float buf[12][6]; /* Buffer for writing data */ + int i, j; /* local index variable */ + + /* Create a new format file */ + if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0) + goto error; + if(H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) + goto error; + if((fid = H5Fcreate(fname, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) + goto error; + + /* Set chunk, filter, no-filter-edge-chunk */ + if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) + goto error; + if(H5Pset_chunk(dcpl, 2, c_dims) < 0) + goto error; + if(H5Pset_deflate(dcpl, 9) < 0) + goto error; + if(H5Pset_chunk_opts(dcpl, H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS) < 0) + goto error; + + /* Create dataspace */ + if((sid = H5Screate_simple(2, dims2, NULL)) < 0) + goto error; + + /* Create the dataset */ + if((did = H5Dcreate2(fid, DSET_EDGE, H5T_NATIVE_FLOAT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Create data */ + for (i = 0; i< 12; i++) + for (j = 0; j< 6; j++) + buf[i][j] = (float)(100.0); + + /* Write to the dataset */ + if(H5Dwrite(did, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Pclose(dcpl) < 0) + goto error; + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did) < 0) + goto error; + if(H5Pclose(fapl) < 0) + goto error; + if(H5Fclose(fid) < 0) + goto error; + +error: + H5E_BEGIN_TRY { + H5Pclose(dcpl); + H5Sclose(sid); + H5Dclose(did); + H5Fclose(fid); + H5Pclose(fapl); + } H5E_END_TRY; + +} /* gen_edge() */ + +int main(void) +{ + /* Generate an old format file with datasets for testing */ + gen_old(OLD_FILE); + + /* Generate a new format file with datasets for testing */ + gen_new(NEW_FILE); + + /* Generate a new format file (+reopen with old format) for testing */ + gen_all(ALL_FILE); + + /* Generate a new format file with a no-filter-edge-chunk dataset for testing */ + gen_edge(EDGE_FILE); + return 0; +} diff --git a/tools/h5format_convert/h5format_convert.c b/tools/h5format_convert/h5format_convert.c new file mode 100644 index 0000000..8c19693 --- /dev/null +++ b/tools/h5format_convert/h5format_convert.c @@ -0,0 +1,438 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. 
* + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * Programmer: Vailin Choi; Feb 2015 + */ + + +/* + * We include the private header file so we can get to the uniform + * programming environment it declares. + * HDF5 API functions (except for H5G_basename()) + */ +#include "H5private.h" +#include "h5tools.h" +#include "h5tools_utils.h" +#include "h5trav.h" + +/* Name of tool */ +#define PROGRAMNAME "h5format_convert" + +static char *fname_g = NULL; +static char *dname_g = NULL; +static int dset_g = FALSE; +static int noop_g = FALSE; +static int verbose_g = 0; + +/* + * Command-line options: The user can specify short or long-named + * parameters. + */ +static const char *s_opts = "hVvd:n"; +static struct long_options l_opts[] = { + { "help", no_arg, 'h' }, + { "hel", no_arg, 'h'}, + { "he", no_arg, 'h'}, + { "version", no_arg, 'V' }, + { "version", no_arg, 'V' }, + { "versio", no_arg, 'V' }, + { "versi", no_arg, 'V' }, + { "vers", no_arg, 'V' }, + { "verbose", no_arg, 'v' }, + { "verbos", no_arg, 'v' }, + { "verbo", no_arg, 'v' }, + { "verb", no_arg, 'v' }, + { "dname", require_arg, 'd' }, + { "dnam", require_arg, 'd' }, + { "dna", require_arg, 'd' }, + { "dn", require_arg, 'd' }, + { "noop", no_arg, 'n' }, + { "noo", no_arg, 'n' }, + { "no", no_arg, 'n' }, + { NULL, 0, '\0' } +}; + + +/*------------------------------------------------------------------------- + * Function: usage + * + * Purpose: print usage + * + * Return: void + * + *------------------------------------------------------------------------- + */ +static void usage(const char *prog) +{ + printf("usage: %s [OPTIONS] file_name\n", prog); + printf(" OPTIONS\n"); + printf(" -h, --help Print a usage message and exit\n"); + printf(" -V, --version Print version number and exit\n"); + printf(" -v, --verbose Turn on verbose mode\n"); + printf(" -d dname, --dname=dataset_name Pathname for the dataset\n"); + printf(" -n, --noop Perform all the steps except the actual conversion\n"); + printf("\n"); + printf("Examples of use:\n"); + printf("\n"); + printf("h5format_convert -d /group/dataset file_name\n"); + printf(" Convert the chunk indexing type to version 1 B-tree\n"); + printf(" for the chunked dataset in the HDF5 file .\n"); + printf("\n"); + printf("h5format_convert file_name\n"); + printf(" Convert the chunk indexing type to version 1 B-tree\n"); + printf(" for all the chunked datasets in the HDF5 file .\n"); + printf("\n"); + printf("h5format_convert -n -d /group/dataset file_name\n"); + printf(" Go through all the steps except the actual conversion when \n"); + printf(" converting the chunked dataset in the HDF5 file .\n"); +} /* usage() */ + +/*------------------------------------------------------------------------- + * Function: parse_command_line + * + * Purpose: parse command line input + * + * Return: Success: 0 + * Failure: 1 + * + *------------------------------------------------------------------------- + */ +static int +parse_command_line(int argc, const char **argv) +{ + int opt; + + /* no arguments */ + if (argc == 1) { + usage(h5tools_getprogname()); + h5tools_setstatus(EXIT_FAILURE); + goto error; + } + + /* parse command line options */ + while ((opt = get_option(argc, argv, s_opts, l_opts)) != EOF) { + switch((char) opt) { + case 'h': + usage(h5tools_getprogname()); + h5tools_setstatus(EXIT_SUCCESS); + goto error; + + case 'V': + print_version(h5tools_getprogname()); + h5tools_setstatus(EXIT_SUCCESS); + goto error; + + case 'v': + verbose_g = TRUE; + break; + + case 
'd': /* -d dname */
+                if(opt_arg != NULL && *opt_arg)
+                    dname_g = HDstrdup(opt_arg);
+                if(dname_g == NULL) {
+                    h5tools_setstatus(EXIT_FAILURE);
+                    error_msg("No dataset name\n");
+                    usage(h5tools_getprogname());
+                    goto error;
+                }
+                dset_g = TRUE;
+                break;
+
+            case 'n': /* -n */
+                noop_g = TRUE;
+                break;
+
+            default:
+                h5tools_setstatus(EXIT_FAILURE);
+                usage(h5tools_getprogname());
+                goto error;
+                break;
+        } /* switch */
+    } /* while */
+
+    if (argc <= opt_ind) {
+        error_msg("missing file name\n");
+        usage(h5tools_getprogname());
+        h5tools_setstatus(EXIT_FAILURE);
+        goto error;
+    }
+
+    fname_g = HDstrdup(argv[opt_ind]);
+
+    return(0);
+
+error:
+    return(-1);
+} /* parse_command_line() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: leave
+ *
+ * Purpose: Close HDF5
+ *
+ * Return: Does not return
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+leave(int ret)
+{
+    h5tools_close();
+
+    HDexit(ret);
+} /* leave() */
+
+/*-------------------------------------------------------------------------
+ * Function: convert()
+ *
+ * Purpose: To change the chunk indexing type of the dataset to version 1 B-tree:
+ *          -- the dataset has to be chunked
+ *          -- the dataset's chunk indexing type must not already be version 1 B-tree
+ *          If these conditions are not fulfilled, the tool does not perform
+ *          the conversion but exits with success.
+ *
+ * Return: Success: 0
+ *         Failure: -1
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+convert(hid_t fid, const char *dname)
+{
+    hid_t dcpl = -1;
+    hid_t did = -1;
+    H5D_layout_t layout_type;
+    H5D_chunk_index_t idx_type;
+
+    /* Open the dataset */
+    if((did = H5Dopen(fid, dname, H5P_DEFAULT)) < 0) {
+        error_msg("unable to open dataset \"%s\"\n", dname);
+        h5tools_setstatus(EXIT_FAILURE);
+        goto error;
+
+    } else if(verbose_g)
+        printf("Open the dataset\n");
+
+    /* Get the dataset's creation property list */
+    if((dcpl = H5Dget_create_plist(did)) < 0) {
+        error_msg("unable to get the dataset creation property list\n");
+        h5tools_setstatus(EXIT_FAILURE);
+        goto error;
+    }
+
+    /* Get the dataset's layout */
+    if((layout_type = H5Pget_layout(dcpl)) < 0) {
+        error_msg("unable to get the dataset layout type\n");
+        h5tools_setstatus(EXIT_FAILURE);
+        goto error;
+
+    } else if(verbose_g)
+        printf("Retrieve the dataset's layout\n");
+
+    /* No further action if not a chunked dataset */
+    if(layout_type != H5D_CHUNKED) {
+        if(verbose_g)
+            printf("Dataset is not chunked: no further action\n");
+        h5tools_setstatus(EXIT_SUCCESS);
+        goto done;
+
+    } else if(verbose_g)
+        printf("Verify the dataset is a chunked dataset\n");
+
+    /* Get the dataset's chunk indexing type */
+    if(H5Dget_chunk_index_type(did, &idx_type) < 0) {
+        error_msg("unable to get the chunk indexing type for \"%s\"\n", dname);
+        h5tools_setstatus(EXIT_FAILURE);
+        goto error;
+
+    } else if(verbose_g)
+        printf("Retrieve the dataset's chunk indexing type\n");
+
+    /* No further action if the chunk indexing type is already version 1 B-tree */
+    if(idx_type == H5D_CHUNK_IDX_BTREE) {
+        if(verbose_g)
+            printf("Chunk indexing type is already version 1 B-tree: no further action\n");
+        h5tools_setstatus(EXIT_SUCCESS);
+        goto done;
+
+    } else if (verbose_g)
+        printf("Verify the dataset's chunk indexing type is not version 1 B-tree\n");
+
+    /* No further action if it is a noop */
+    if(noop_g) {
+        if(verbose_g)
+            printf("Not converting the 
dataset\n"); + h5tools_setstatus(EXIT_SUCCESS); + goto done; + } + + if(verbose_g) + printf("Converting the dataset...\n"); + + /* Convert the dataset's chunk indexing type to version 1 B-tree */ + if(H5Dformat_convert(did) < 0) { + error_msg("unable to convert chunk indexing for \"%s\"\n", dname); + h5tools_setstatus(EXIT_FAILURE); + goto error; + + } else if(verbose_g) + printf("Done\n"); + +done: + /* Close the dataset */ + if(H5Dclose(did) < 0) { + error_msg("unable to close dataset \"%s\"\n", dname); + h5tools_setstatus(EXIT_FAILURE); + goto error; + } else if(verbose_g) + printf("Close the dataset\n"); + + /* Close the dataset creation property list */ + if(H5Pclose(dcpl) < 0) { + error_msg("unable to close dataset creation property list\n"); + h5tools_setstatus(EXIT_FAILURE); + goto error; + } else if(verbose_g) + printf("Close the dataset creation property list\n"); + + return(0); + +error: + if(verbose_g) + printf("Error encountered\n"); + + H5E_BEGIN_TRY { + H5Pclose(dcpl); + H5Dclose(did); + } H5E_END_TRY; + + return(-1); + +} /* convert() */ + +/*------------------------------------------------------------------------- + * Function: convert_dsets_cb() + * + * Purpose: The callback routine from the traversal to convert the + * chunk indexing type of the dataset object. + * + * Return: Success: 0 + * Failure: 1 + *------------------------------------------------------------------------- + */ +static int +convert_dsets_cb(const char *path, const H5O_info_t *oi, const char *already_visited, void *_fid) +{ + hid_t fid = *(hid_t *)_fid; + + /* If the object has already been seen then just return */ + if(NULL == already_visited) { + + if(oi->type == H5O_TYPE_DATASET) { + if(verbose_g) + printf("Going to process dataset:%s...\n", path); + if(convert(fid, path) < 0) + goto error; + } + + } /* end if */ + + return 0; + +error: + return -1; + +} /* end convert_dsets_cb() */ + + +/*------------------------------------------------------------------------- + * Function: main + * + * Purpose: To convert the chunk indexing type of a dataset in a file to + * version 1 B-tree. 
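+ *
+ *		Typical invocations, mirroring the examples printed by usage()
+ *		(the dataset path below is illustrative):
+ *
+ *		    h5format_convert -d /group/dataset file_name
+ *		    h5format_convert file_name
+ *		    h5format_convert -n -d /group/dataset file_name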
+ * + * Return: Success: 0 + * Failure: 1 + * + *------------------------------------------------------------------------- + */ +int +main(int argc, const char *argv[]) +{ + H5E_auto2_t func; + void *edata; + hid_t fid = -1; + + h5tools_setprogname(PROGRAMNAME); + h5tools_setstatus(EXIT_SUCCESS); + + /* Disable error reporting */ + H5Eget_auto2(H5E_DEFAULT, &func, &edata); + H5Eset_auto2(H5E_DEFAULT, NULL, NULL); + + /* Initialize h5tools lib */ + h5tools_init(); + + /* Parse command line options */ + if(parse_command_line(argc, argv) < 0) + goto done; + else if(verbose_g) + printf("Process command line options\n"); + + if(noop_g && verbose_g) + printf("It is noop...\n"); + + /* Open the HDF5 file */ + if((fid = h5tools_fopen(fname_g, H5F_ACC_RDWR, H5P_DEFAULT, NULL, NULL, 0)) < 0) { + error_msg("unable to open file \"%s\"\n", fname_g); + h5tools_setstatus(EXIT_FAILURE); + goto done; + } else if(verbose_g) + printf("Open the file %s\n", fname_g); + + if(dset_g) { /* Convert a specified dataset in the file */ + if(verbose_g) + printf("Going to process dataset: %s...\n", dname_g); + if(convert(fid, dname_g) < 0) + goto done; + } else { /* Convert all datasets in the file */ + if(verbose_g) + printf("Processing all datasets in the file...\n"); + if(h5trav_visit(fid, "/", TRUE, TRUE, convert_dsets_cb, NULL, &fid) < 0) + goto done; + } + +done: + /* Close the file */ + if(fid >= 0) { + if(H5Fclose(fid) < 0) { + error_msg("unable to close file \"%s\"\n", fname_g); + h5tools_setstatus(EXIT_FAILURE); + } else if(verbose_g) + printf("Close the file\n"); + } + + if(fname_g) + HDfree(fname_g); + if(dname_g) + HDfree(dname_g); + + H5Eset_auto2(H5E_DEFAULT, func, edata); + leave(h5tools_getstatus()); + +} /* end main() */ diff --git a/tools/h5format_convert/testfiles/h5fc_all.h5 b/tools/h5format_convert/testfiles/h5fc_all.h5 new file mode 100644 index 0000000..26e682d Binary files /dev/null and b/tools/h5format_convert/testfiles/h5fc_all.h5 differ diff --git a/tools/h5format_convert/testfiles/h5fc_d_file.ddl b/tools/h5format_convert/testfiles/h5fc_d_file.ddl new file mode 100644 index 0000000..3641a4f --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_d_file.ddl @@ -0,0 +1,22 @@ +usage: h5format_convert [OPTIONS] file_name + OPTIONS + -h, --help Print a usage message and exit + -V, --version Print version number and exit + -v, --verbose Turn on verbose mode + -d dname, --dname=dataset_name Pathname for the dataset + -n, --noop Perform all the steps except the actual conversion + +Examples of use: + +h5format_convert -d /group/dataset file_name + Convert the chunk indexing type to version 1 B-tree + for the chunked dataset in the HDF5 file . + +h5format_convert file_name + Convert the chunk indexing type to version 1 B-tree + for all the chunked datasets in the HDF5 file . + +h5format_convert -n -d /group/dataset file_name + Go through all the steps except the actual conversion when + converting the chunked dataset in the HDF5 file . 
+h5format_convert error: missing file name diff --git a/tools/h5format_convert/testfiles/h5fc_dname.ddl b/tools/h5format_convert/testfiles/h5fc_dname.ddl new file mode 100644 index 0000000..c391764 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_dname.ddl @@ -0,0 +1,22 @@ +usage: h5format_convert [OPTIONS] file_name + OPTIONS + -h, --help Print a usage message and exit + -V, --version Print version number and exit + -v, --verbose Turn on verbose mode + -d dname, --dname=dataset_name Pathname for the dataset + -n, --noop Perform all the steps except the actual conversion + +Examples of use: + +h5format_convert -d /group/dataset file_name + Convert the chunk indexing type to version 1 B-tree + for the chunked dataset in the HDF5 file . + +h5format_convert file_name + Convert the chunk indexing type to version 1 B-tree + for all the chunked datasets in the HDF5 file . + +h5format_convert -n -d /group/dataset file_name + Go through all the steps except the actual conversion when + converting the chunked dataset in the HDF5 file . +h5format_convert error: No dataset name diff --git a/tools/h5format_convert/testfiles/h5fc_edge.h5 b/tools/h5format_convert/testfiles/h5fc_edge.h5 new file mode 100644 index 0000000..2a1ab10 Binary files /dev/null and b/tools/h5format_convert/testfiles/h5fc_edge.h5 differ diff --git a/tools/h5format_convert/testfiles/h5fc_help.ddl b/tools/h5format_convert/testfiles/h5fc_help.ddl new file mode 100644 index 0000000..9081ab8 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_help.ddl @@ -0,0 +1,21 @@ +usage: h5format_convert [OPTIONS] file_name + OPTIONS + -h, --help Print a usage message and exit + -V, --version Print version number and exit + -v, --verbose Turn on verbose mode + -d dname, --dname=dataset_name Pathname for the dataset + -n, --noop Perform all the steps except the actual conversion + +Examples of use: + +h5format_convert -d /group/dataset file_name + Convert the chunk indexing type to version 1 B-tree + for the chunked dataset in the HDF5 file . + +h5format_convert file_name + Convert the chunk indexing type to version 1 B-tree + for all the chunked datasets in the HDF5 file . + +h5format_convert -n -d /group/dataset file_name + Go through all the steps except the actual conversion when + converting the chunked dataset in the HDF5 file . 
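For the h5format_convert.c code above, the per-dataset work reduces to the public calls used by convert(): H5Dget_chunk_index_type() and H5Dformat_convert(). The short sketch below shows that sequence in isolation; it is not part of the patch, the file name "example.h5" and dataset path "/DSET" are placeholders, and error handling is trimmed to the minimum.

/* Sketch only: convert one chunked dataset's index to a version 1 B-tree.
 * "example.h5" and "/DSET" are placeholder names, not files from this patch. */
#include "hdf5.h"
#include <stdio.h>

int
main(void)
{
    hid_t fid = -1, did = -1;
    H5D_chunk_index_t idx_type;
    int ret = 0;

    if((fid = H5Fopen("example.h5", H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
        return 1;
    if((did = H5Dopen2(fid, "/DSET", H5P_DEFAULT)) < 0) {
        H5Fclose(fid);
        return 1;
    }

    /* Assumes a chunked dataset (the tool checks H5Pget_layout() first);
     * convert only when the index is not already a version 1 B-tree. */
    if(H5Dget_chunk_index_type(did, &idx_type) >= 0 &&
            idx_type != H5D_CHUNK_IDX_BTREE) {
        if(H5Dformat_convert(did) < 0) {
            fprintf(stderr, "unable to convert chunk indexing\n");
            ret = 1;
        }
    }

    H5Dclose(did);
    H5Fclose(fid);
    return ret;
}

The tool itself adds the layout check, verbose reporting, the -n/--noop path, and the traversal over all datasets shown earlier.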
diff --git a/tools/h5format_convert/testfiles/h5fc_new.h5 b/tools/h5format_convert/testfiles/h5fc_new.h5 new file mode 100644 index 0000000..fe910fa Binary files /dev/null and b/tools/h5format_convert/testfiles/h5fc_new.h5 differ diff --git a/tools/h5format_convert/testfiles/h5fc_nonexistdset_file.ddl b/tools/h5format_convert/testfiles/h5fc_nonexistdset_file.ddl new file mode 100644 index 0000000..39450c0 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_nonexistdset_file.ddl @@ -0,0 +1 @@ +h5format_convert error: unable to open dataset "nonexist" diff --git a/tools/h5format_convert/testfiles/h5fc_nonexistfile.ddl b/tools/h5format_convert/testfiles/h5fc_nonexistfile.ddl new file mode 100644 index 0000000..706ea9d --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_nonexistfile.ddl @@ -0,0 +1 @@ +h5format_convert error: unable to open file "nonexist.h5" diff --git a/tools/h5format_convert/testfiles/h5fc_nooption.ddl b/tools/h5format_convert/testfiles/h5fc_nooption.ddl new file mode 100644 index 0000000..9081ab8 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_nooption.ddl @@ -0,0 +1,21 @@ +usage: h5format_convert [OPTIONS] file_name + OPTIONS + -h, --help Print a usage message and exit + -V, --version Print version number and exit + -v, --verbose Turn on verbose mode + -d dname, --dname=dataset_name Pathname for the dataset + -n, --noop Perform all the steps except the actual conversion + +Examples of use: + +h5format_convert -d /group/dataset file_name + Convert the chunk indexing type to version 1 B-tree + for the chunked dataset in the HDF5 file . + +h5format_convert file_name + Convert the chunk indexing type to version 1 B-tree + for all the chunked datasets in the HDF5 file . + +h5format_convert -n -d /group/dataset file_name + Go through all the steps except the actual conversion when + converting the chunked dataset in the HDF5 file . diff --git a/tools/h5format_convert/testfiles/h5fc_old.h5 b/tools/h5format_convert/testfiles/h5fc_old.h5 new file mode 100644 index 0000000..13642a1 Binary files /dev/null and b/tools/h5format_convert/testfiles/h5fc_old.h5 differ diff --git a/tools/h5format_convert/testfiles/h5fc_v_all.ddl b/tools/h5format_convert/testfiles/h5fc_v_all.ddl new file mode 100644 index 0000000..2d5b99c --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_v_all.ddl @@ -0,0 +1,26 @@ +Process command line options +Open the file h5fc_old.h5 +Processing all datasets in the file... +Going to process dataset:/DSET_NDATA_BT1... +Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Chunk indexing type is already version 1 B-tree: no further action +Close the dataset +Close the dataset creation property list +Going to process dataset:/DSET_NON_CHUNKED... +Open the dataset +Retrieve the dataset's layout +Dataset is not chunked: no further action +Close the dataset +Close the dataset creation property list +Going to process dataset:/GROUP/DSET_BT1... 
+Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Chunk indexing type is already version 1 B-tree: no further action +Close the dataset +Close the dataset creation property list +Close the file diff --git a/tools/h5format_convert/testfiles/h5fc_v_bt1.ddl b/tools/h5format_convert/testfiles/h5fc_v_bt1.ddl new file mode 100644 index 0000000..c63c1a0 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_v_bt1.ddl @@ -0,0 +1,11 @@ +Process command line options +Open the file h5fc_old.h5 +Going to process dataset: /GROUP/DSET_BT1... +Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Chunk indexing type is already version 1 B-tree: no further action +Close the dataset +Close the dataset creation property list +Close the file diff --git a/tools/h5format_convert/testfiles/h5fc_v_n_1d.ddl b/tools/h5format_convert/testfiles/h5fc_v_n_1d.ddl new file mode 100644 index 0000000..8cc6dec --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_v_n_1d.ddl @@ -0,0 +1,13 @@ +Process command line options +It is noop... +Open the file h5fc_new.h5 +Going to process dataset: /DSET_EA... +Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Verify the dataset's chunk indexing type is not version 1 B-tree +Not converting the dataset +Close the dataset +Close the dataset creation property list +Close the file diff --git a/tools/h5format_convert/testfiles/h5fc_v_n_all.ddl b/tools/h5format_convert/testfiles/h5fc_v_n_all.ddl new file mode 100644 index 0000000..e02a465 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_v_n_all.ddl @@ -0,0 +1,45 @@ +Process command line options +It is noop... +Open the file h5fc_all.h5 +Processing all datasets in the file... +Going to process dataset:/DSET_NDATA_BT1... +Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Chunk indexing type is already version 1 B-tree: no further action +Close the dataset +Close the dataset creation property list +Going to process dataset:/DSET_NDATA_EA... +Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Verify the dataset's chunk indexing type is not version 1 B-tree +Not converting the dataset +Close the dataset +Close the dataset creation property list +Going to process dataset:/GROUP/DSET_BT1... +Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Chunk indexing type is already version 1 B-tree: no further action +Close the dataset +Close the dataset creation property list +Going to process dataset:/GROUP/DSET_BT2... +Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Verify the dataset's chunk indexing type is not version 1 B-tree +Not converting the dataset +Close the dataset +Close the dataset creation property list +Going to process dataset:/GROUP/DSET_NON_CHUNKED... 
+Open the dataset +Retrieve the dataset's layout +Dataset is not chunked: no further action +Close the dataset +Close the dataset creation property list +Close the file diff --git a/tools/h5format_convert/testfiles/h5fc_v_ndata_bt1.ddl b/tools/h5format_convert/testfiles/h5fc_v_ndata_bt1.ddl new file mode 100644 index 0000000..32f04aa --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_v_ndata_bt1.ddl @@ -0,0 +1,12 @@ +Process command line options +It is noop... +Open the file h5fc_old.h5 +Going to process dataset: /DSET_NDATA_BT1... +Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Chunk indexing type is already version 1 B-tree: no further action +Close the dataset +Close the dataset creation property list +Close the file diff --git a/tools/h5format_convert/testfiles/h5fc_v_non_chunked.ddl b/tools/h5format_convert/testfiles/h5fc_v_non_chunked.ddl new file mode 100644 index 0000000..d4f0f6c --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_v_non_chunked.ddl @@ -0,0 +1,9 @@ +Process command line options +Open the file h5fc_old.h5 +Going to process dataset: /DSET_NON_CHUNKED... +Open the dataset +Retrieve the dataset's layout +Dataset is not chunked: no further action +Close the dataset +Close the dataset creation property list +Close the file diff --git a/tools/h5format_convert/testfiles/h5fc_version.ddl b/tools/h5format_convert/testfiles/h5fc_version.ddl new file mode 100644 index 0000000..67bd55c --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_version.ddl @@ -0,0 +1 @@ +h5format_convert: Version 1.9.222-swmr1 diff --git a/tools/h5format_convert/testh5fc.sh.in b/tools/h5format_convert/testh5fc.sh.in new file mode 100644 index 0000000..c732897 --- /dev/null +++ b/tools/h5format_convert/testh5fc.sh.in @@ -0,0 +1,381 @@ +#! /bin/sh +# +# Copyright by The HDF Group. +# Copyright by the Board of Trustees of the University of Illinois. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the files COPYING and Copyright.html. COPYING can be found at the root +# of the source code distribution tree; Copyright.html can be found at the +# root level of an installed copy of the electronic HDF5 document set and +# is linked from the top-level documents page. It can also be found at +# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have +# access to either file, you may request a copy from help@hdfgroup.org. +# +# Tests for the h5format_convert tool +# +# + +srcdir=@srcdir@ + +# Determine which filters are available +USE_FILTER_SZIP="@USE_FILTER_SZIP@" +USE_FILTER_DEFLATE="@USE_FILTER_DEFLATE@" +USE_FILTER_SHUFFLE="@USE_FILTER_SHUFFLE@" +USE_FILTER_FLETCHER32="@USE_FILTER_FLETCHER32@" +USE_FILTER_NBIT="@USE_FILTER_NBIT@" +USE_FILTER_SCALEOFFSET="@USE_FILTER_SCALEOFFSET@" + +TESTNAME=h5format_convert +EXIT_SUCCESS=0 +EXIT_FAILURE=1 + +FORMCONV=h5format_convert # The tool name +FORMCONV_BIN=`pwd`/$FORMCONV # The path of the tool binary + +CHK_IDX=h5fc_chk_idx # The program name +CHK_IDX_BIN=`pwd`/$CHK_IDX # The program to verify the chunk indexing type is v1 B-tree + +RM='rm -rf' +CMP='cmp -s' +DIFF='diff -c' +CP='cp' +DIRNAME='dirname' +LS='ls' +AWK='awk' + +nerrors=0 +verbose=yes + +# source dirs +SRC_TOOLS="$srcdir/.." 
+SRC_TOOLS_TESTFILES="$SRC_TOOLS/testfiles"
+
+# testfiles source dirs for tools
+SRC_H5LS_TESTFILES="$SRC_TOOLS_TESTFILES"
+SRC_H5DUMP_TESTFILES="$SRC_TOOLS_TESTFILES"
+SRC_H5DIFF_TESTFILES="$SRC_TOOLS/h5diff/testfiles"
+SRC_H5COPY_TESTFILES="$SRC_TOOLS/h5copy/testfiles"
+SRC_H5REPACK_TESTFILES="$SRC_TOOLS/h5repack/testfiles"
+SRC_H5JAM_TESTFILES="$SRC_TOOLS/h5jam/testfiles"
+SRC_H5STAT_TESTFILES="$SRC_TOOLS/h5stat/testfiles"
+SRC_H5IMPORT_TESTFILES="$SRC_TOOLS/h5import/testfiles"
+SRC_H5FORMCONV_TESTFILES="$SRC_TOOLS/h5format_convert/testfiles"
+
+TESTDIR=./testfiles
+test -d $TESTDIR || mkdir $TESTDIR
+
+######################################################################
+# test files
+# --------------------------------------------------------------------
+# All test files are copied from the source directory to the test directory.
+# NOTE: Keep this framework to add/remove test files.
+#       Any test files from other tools can be used in this framework.
+#       This list is also used to check that the files exist.
+#       An entry can be commented out with a leading '#' (no space after it).
+# --------------------------------------------------------------------
+LIST_HDF5_TEST_FILES="
+$SRC_H5FORMCONV_TESTFILES/h5fc_old.h5
+$SRC_H5FORMCONV_TESTFILES/h5fc_new.h5
+$SRC_H5FORMCONV_TESTFILES/h5fc_all.h5
+$SRC_H5FORMCONV_TESTFILES/h5fc_edge.h5
+"
+
+LIST_OTHER_TEST_FILES="
+$SRC_H5FORMCONV_TESTFILES/h5fc_version.ddl
+$SRC_H5FORMCONV_TESTFILES/h5fc_help.ddl
+$SRC_H5FORMCONV_TESTFILES/h5fc_nooption.ddl
+$SRC_H5FORMCONV_TESTFILES/h5fc_nonexistfile.ddl
+$SRC_H5FORMCONV_TESTFILES/h5fc_d_file.ddl
+$SRC_H5FORMCONV_TESTFILES/h5fc_dname.ddl
+$SRC_H5FORMCONV_TESTFILES/h5fc_nonexistdset_file.ddl
+$SRC_H5FORMCONV_TESTFILES/h5fc_v_non_chunked.ddl
+$SRC_H5FORMCONV_TESTFILES/h5fc_v_bt1.ddl
+$SRC_H5FORMCONV_TESTFILES/h5fc_v_ndata_bt1.ddl
+$SRC_H5FORMCONV_TESTFILES/h5fc_v_all.ddl
+$SRC_H5FORMCONV_TESTFILES/h5fc_v_n_1d.ddl
+$SRC_H5FORMCONV_TESTFILES/h5fc_v_n_all.ddl
+"
+
+#
+# copy test files and expected output files from source dirs to test dir
+#
+COPY_TESTFILES="$LIST_HDF5_TEST_FILES $LIST_OTHER_TEST_FILES"
+
+COPY_TESTFILES_TO_TESTDIR()
+{
+    # Copy test files; use -f to make sure we get a fresh copy.
+    for tstfile in $COPY_TESTFILES
+    do
+        # ignore '#' comment
+        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+        RET=$?
+        if [ $RET -eq 1 ]; then
+            # Skip cp if srcdir is the same as destdir; this occurs when
+            # the build/test is performed in the source directory and
+            # would make cp fail.
+            SDIR=`$DIRNAME $tstfile`
+            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+            INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
+            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+                $CP -f $tstfile $TESTDIR
+                if [ $? -ne 0 ]; then
+                    echo "Error: FAILED to copy $tstfile ."
+
+                    # Comment this out to CREATE the expected file
+                    exit $EXIT_FAILURE
+                fi
+            fi
+        fi
+    done
+}
+
+CLEAN_TESTFILES_AND_TESTDIR()
+{
+    # Skip rm if srcdir is the same as destdir; this occurs when
+    # the build/test is performed in the source directory and
+    # would make cp fail.
+    SDIR=`$DIRNAME $tstfile`
+    INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+    INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
+    if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+        $RM $TESTDIR
+    fi
+}
+
+# Print a one-line message left justified in a field of 80 characters,
+# beginning with the word "Testing".
+#
+TESTING() {
+    SPACES=" "
+    echo "Testing $* $SPACES" | cut -c1-80 | tr -d '\012'
+}
+
+# Run a test and print PASS or *FAIL*. If a test fails then increment
+# the `nerrors' global variable and (if $verbose is set) display the
+# difference between the actual output and the expected output. The
+# expected output is given as the first argument to this function and
+# the actual output file is calculated by replacing the `.ddl' with
+# `.out'. The actual output is not removed if $HDF5_NOCLEANUP has a
+# non-zero value.
+#
+# $1: expected output
+# $2 to at most $6: options to the tool and the input fname
+#     -d dname or --dname=dname
+#     -v or --verbose
+#     -n or --noop
+#     fname
+TOOLTEST_OUT() {
+    expect="$TESTDIR/$1"
+    actual="$TESTDIR/`basename $1 .ddl`.out"
+    actual_err="$TESTDIR/`basename $1 .ddl`.err"
+    actual_sav=${actual}-sav
+    actual_err_sav=${actual_err}-sav
+    # Run test.
+    TESTING $FORMCONV $@
+    (
+        cd $TESTDIR
+        $RUNSERIAL $FORMCONV_BIN $2 $3 $4 $5 $6
+    ) >$actual 2>$actual_err
+    cp $actual $actual_sav
+    cp $actual_err $actual_err_sav
+    cat $actual_err >> $actual
+
+    if $CMP $expect $actual; then
+        echo " PASSED"
+    else
+        echo "*FAILED*"
+        echo " Expected result (*.ddl) differs from actual result (*.out)"
+        nerrors="`expr $nerrors + 1`"
+        test yes = "$verbose" && $DIFF $expect $actual |sed 's/^/ /'
+    fi
+
+    # Clean up output files
+    if test -z "$HDF5_NOCLEANUP"; then
+        rm -f $actual $actual_err $actual_sav $actual_err_sav
+    fi
+}
+
+# Check that the tool exits with success and produces no output.
+# Assume all short options
+# Assume $1 is fname
+# $2 to at most $4: options to the tool
+#     -d dname or -a
+#     -n
+TOOLTEST() {
+    TESTING $FORMCONV $@
+    $RUNSERIAL $FORMCONV_BIN $2 $3 $4 $TESTDIR/$1
+    exitcode=$?
+    if [ $exitcode -ne 0 ]; then
+        echo "*FAILED*"
+        echo " The tool exited with failure"
+        nerrors="`expr $nerrors + 1`"
+    else
+        echo " PASSED"
+    fi
+}
+
+CHECKING() {
+    SPACES=" "
+    echo "Verifying $* $SPACES" | cut -c1-80 | tr -d '\012'
+}
+
+# $1 file name
+# $2 dataset name
+IDX_CHECK() {
+    CHECKING $1 $2
+    $RUNSERIAL $CHK_IDX_BIN $TESTDIR/$1 $2
+    ret=$?
+    if [ $ret -eq 0 ]; then
+        echo " PASSED"
+    else
+        echo "*FAILED*"
+        echo " The chunk indexing type is not correct"
+        nerrors="`expr $nerrors + 1`"
+    fi
+}
+
+# Print a "SKIP" message
+SKIP() {
+    TESTING $FORMCONV $@
+    echo " -SKIP-"
+}
+
+
+
+##############################################################################
+##############################################################################
+### T H E T E S T S ###
+##############################################################################
+##############################################################################
+# prepare for test
+COPY_TESTFILES_TO_TESTDIR
+#
+#
+#
+# h5format_convert --version
+# h5format_convert --help
+# h5format_convert (no options)
+# h5format_convert nonexist.h5 (no options, file does not exist)
+TOOLTEST_OUT h5fc_version.ddl --version
+TOOLTEST_OUT h5fc_help.ddl --help
+TOOLTEST_OUT h5fc_nooption.ddl
+TOOLTEST_OUT h5fc_nonexistfile.ddl nonexist.h5
+#
+#
+# h5format_convert -d h5fc_old.h5 (just -d option, file exists)
+# h5format_convert --dname h5fc_old.h5 (just --dname option, file exists)
+# h5format_convert --dname (just --dname option)
+# h5format_convert --dname=nonexist h5fc_old.h5 (dataset does not exist, file exists)
+TOOLTEST_OUT h5fc_d_file.ddl -d h5fc_old.h5
+TOOLTEST_OUT h5fc_d_file.ddl --dname h5fc_old.h5
+TOOLTEST_OUT h5fc_dname.ddl --dname
+TOOLTEST_OUT h5fc_nonexistdset_file.ddl --dname=nonexist h5fc_old.h5
+#
+#
+#
+# h5format_convert -d /DSET_NON_CHUNKED -v h5fc_old.h5 (verbose, non-chunked dataset)
+# h5format_convert -d /GROUP/DSET_BT1 --verbose h5fc_old.h5 (verbose, bt1 dataset)
+# h5format_convert -d /DSET_NDATA_BT1 -v -n h5fc_old.h5 (verbose, noop, bt1+nodata dataset)
+# h5format_convert -v h5fc_old.h5 (verbose, all datasets)
+TOOLTEST_OUT h5fc_v_non_chunked.ddl -d /DSET_NON_CHUNKED -v h5fc_old.h5
+TOOLTEST_OUT h5fc_v_bt1.ddl -d /GROUP/DSET_BT1 --verbose h5fc_old.h5
+TOOLTEST_OUT h5fc_v_ndata_bt1.ddl -d /DSET_NDATA_BT1 -v -n h5fc_old.h5
+TOOLTEST_OUT h5fc_v_all.ddl -v h5fc_old.h5
+#
+#
+#
+# h5format_convert -d /DSET_EA -v -n h5fc_new.h5 (verbose, noop, one ea dataset)
+# h5format_convert -v -n h5fc_all.h5 (verbose, noop, all datasets)
+TOOLTEST_OUT h5fc_v_n_1d.ddl -d /DSET_EA -v -n h5fc_new.h5
+TOOLTEST_OUT h5fc_v_n_all.ddl -v -n h5fc_all.h5
+#
+#
+#
+# No output from tests
+# 1) Use the tool to convert the dataset
+# 2) Verify the chunk indexing type is correct
+# h5format_convert -d /DSET_EA h5fc_new.h5
+# h5format_convert -d /GROUP/DSET_NDATA_EA h5fc_new.h5
+# h5format_convert -d /GROUP/DSET_BT2 h5fc_new.h5
+# h5format_convert -d /DSET_NDATA_BT2 h5fc_new.h5
+# h5format_convert -d /DSET_FA h5fc_new.h5
+# h5format_convert -d /GROUP/DSET_NDATA_FA h5fc_new.h5
+# h5format_convert -d /DSET_NONE h5fc_new.h5
+# h5format_convert -d /GROUP/DSET_NDATA_NONE h5fc_new.h5
+TOOLTEST h5fc_new.h5 -d /DSET_EA
+IDX_CHECK h5fc_new.h5 /DSET_EA
+#
+TOOLTEST h5fc_new.h5 -d /GROUP/DSET_NDATA_EA
+IDX_CHECK h5fc_new.h5 /GROUP/DSET_NDATA_EA
+#
+TOOLTEST h5fc_new.h5 -d /GROUP/DSET_BT2
+IDX_CHECK h5fc_new.h5 /GROUP/DSET_BT2
+#
+TOOLTEST h5fc_new.h5 -d /DSET_NDATA_BT2
+IDX_CHECK h5fc_new.h5 /DSET_NDATA_BT2
+#
+TOOLTEST h5fc_new.h5 -d /DSET_FA
+IDX_CHECK h5fc_new.h5 /DSET_FA
+#
+TOOLTEST h5fc_new.h5 -d /GROUP/DSET_NDATA_FA
+IDX_CHECK h5fc_new.h5 /GROUP/DSET_NDATA_FA
+#
+TOOLTEST h5fc_new.h5 -d /DSET_NONE
+IDX_CHECK h5fc_new.h5 /DSET_NONE
+#
+TOOLTEST h5fc_new.h5 -d /GROUP/DSET_NDATA_NONE
+IDX_CHECK h5fc_new.h5 /GROUP/DSET_NDATA_NONE
+#
+#
+#
+# No output from tests: just check exit code
+# h5format_convert -d
/DSET_NDATA_BT1 h5fc_old.h5 (v1-btree dataset) +# h5format_convert -d /GROUP/DSET_NON_CHUNKED h5fc_all.h5 (non-chunked dataset) +TOOLTEST h5fc_old.h5 -d /DSET_NDATA_BT1 +TOOLTEST h5fc_all.h5 -d /GROUP/DSET_NON_CHUNKED +# +# +# +# No output from tests: just check exit code +# h5format_convert -d /GROUP/DSET_BT2 -n h5fc_all.h5 (noop, one dataset) +# h5format_convert -n h5fc_all.h5 (noop, all datasets) +TOOLTEST h5fc_all.h5 -d /GROUP/DSET_BT2 -n +TOOLTEST h5fc_all.h5 -n +# +# +# +# No output from tests: just check exit code +# h5format_convert h5fc_all.h5 +# 1) convert all datasets +# 2) verify indexing types +TOOLTEST h5fc_all.h5 +IDX_CHECK h5fc_all.h5 /DSET_NDATA_BT1 +IDX_CHECK h5fc_all.h5 /DSET_NDATA_EA +IDX_CHECK h5fc_all.h5 /GROUP/DSET_BT1 +IDX_CHECK h5fc_all.h5 /GROUP/DSET_BT2 +# +# +# +# No output from test: just check exit code +# h5format_convert h5fc_edge.h5 +# 1) convert the chunked dataset (filter, no-filter-edge-chunk) +# 2) verify the indexing type +TOOLTEST h5fc_edge.h5 +IDX_CHECK h5fc_edge.h5 /DSET_EDGE +# +# +# +# Clean up temporary files/directories +CLEAN_TESTFILES_AND_TESTDIR + +if test $nerrors -eq 0 ; then + echo "All $TESTNAME tests passed." + exit $EXIT_SUCCESS +else + echo "$TESTNAME tests failed with $nerrors errors." + exit $EXIT_FAILURE +fi + diff --git a/tools/h5stat/h5stat_gentest.c b/tools/h5stat/h5stat_gentest.c index 3096f78..d78a5e5 100644 --- a/tools/h5stat/h5stat_gentest.c +++ b/tools/h5stat/h5stat_gentest.c @@ -359,7 +359,6 @@ gen_idx_file(const char *fname) hid_t did, did2; /* dataset id */ hsize_t dims[1] = {10}; /* dataset dimension */ hsize_t c_dims[1] = {2}; /* chunk dimension */ - herr_t status; /* return status */ int i; /* local index variable */ int buf[10]; /* data buffer */ -- cgit v0.12
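The IDX_CHECK steps above depend on the h5fc_chk_idx helper to confirm that a converted dataset really ends up with a version 1 B-tree chunk index. As an illustration of the kind of check the script relies on, a minimal version could look like the sketch below; this is not the actual h5fc_chk_idx.c added by this patch, which may differ in its messages and error handling.

/* Sketch of a chunk-index checker in the spirit of h5fc_chk_idx:
 * exit 0 if the named dataset uses a version 1 B-tree chunk index. */
#include "hdf5.h"
#include <stdio.h>

int
main(int argc, char *argv[])
{
    hid_t fid = -1, did = -1;
    H5D_chunk_index_t idx_type;
    int ret = 1;                /* assume failure until the index checks out */

    if(argc != 3) {
        fprintf(stderr, "usage: h5fc_chk_idx file_name dataset_pathname\n");
        return 1;
    }
    if((fid = H5Fopen(argv[1], H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
        return 1;
    if((did = H5Dopen2(fid, argv[2], H5P_DEFAULT)) >= 0) {
        if(H5Dget_chunk_index_type(did, &idx_type) >= 0 &&
                idx_type == H5D_CHUNK_IDX_BTREE)
            ret = 0;            /* chunk index is a v1 B-tree, as expected */
        H5Dclose(did);
    }
    H5Fclose(fid);
    return ret;
}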