diff options
author | Dana Robinson <derobins@hdfgroup.org> | 2015-12-15 09:48:59 (GMT) |
---|---|---|
committer | Dana Robinson <derobins@hdfgroup.org> | 2015-12-15 09:48:59 (GMT) |
commit | 4fe9d56f786f1f43e06655fa238ac7b11a194ba1 (patch) | |
tree | 7b1d0a0d729a2fe508efb0c46360d5204edec7b1 /tools/h5format_convert | |
parent | e9da1c3460abdb32496f9271edf2a2dbbaa4e72b (diff) | |
download | hdf5-4fe9d56f786f1f43e06655fa238ac7b11a194ba1.zip hdf5-4fe9d56f786f1f43e06655fa238ac7b11a194ba1.tar.gz hdf5-4fe9d56f786f1f43e06655fa238ac7b11a194ba1.tar.bz2 |
[svn-r28658] Manual merge of revise_chunks and alpha branch.
Tested on: Ubuntu 15.10 (Linux 4.2.0 x86_64) gcc 5.2.1
Autotools serial (C++ Fortran)
Autotools parallel (MPICH 3.1.4, Fortran)
CMake serial (CMake 3.3.2)
Diffstat (limited to 'tools/h5format_convert')
21 files changed, 1829 insertions, 0 deletions
diff --git a/tools/h5format_convert/Makefile.am b/tools/h5format_convert/Makefile.am new file mode 100644 index 0000000..d3aef7d --- /dev/null +++ b/tools/h5format_convert/Makefile.am @@ -0,0 +1,49 @@ +# +# Copyright by The HDF Group. +# Copyright by the Board of Trustees of the University of Illinois. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the files COPYING and Copyright.html. COPYING can be found at the root +# of the source code distribution tree; Copyright.html can be found at the +# root level of an installed copy of the electronic HDF5 document set and +# is linked from the top-level documents page. It can also be found at +# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have +# access to either file, you may request a copy from help@hdfgroup.org. +## +## Makefile.am +## Run automake to generate a Makefile.in from this file. +# +# HDF5 Library Makefile(.in) +# + +include $(top_srcdir)/config/commence.am + +# Include src directory +AM_CPPFLAGS+=-I$(top_srcdir)/src -I$(top_srcdir)/tools/lib + +#test script and program +TEST_PROG=h5fc_gentest +TEST_SCRIPT=testh5fc.sh + +check_PROGRAMS=$(TEST_PROG) h5fc_chk_idx +check_SCRIPTS=$(TEST_SCRIPT) +SCRIPT_DEPEND=h5format_convert$(EXEEXT) + +# These are our main targets, the tools +bin_PROGRAMS=h5format_convert + +# Add h5format_convert specific linker flags here +h5format_convert_LDFLAGS = $(LT_STATIC_EXEC) $(AM_LDFLAGS) + +# Tell automake to clean h5redeploy script +CHECK_CLEANFILES+=*.h5 + +# These were generated by configure. Remove them only when distclean. 
+DISTCLEANFILES=testh5fc.sh + +# All programs rely on hdf5 library and h5tools library +LDADD=$(LIBH5TOOLS) $(LIBHDF5) + +include $(top_srcdir)/config/conclude.am diff --git a/tools/h5format_convert/h5fc_chk_idx.c b/tools/h5format_convert/h5fc_chk_idx.c new file mode 100644 index 0000000..3114379 --- /dev/null +++ b/tools/h5format_convert/h5fc_chk_idx.c @@ -0,0 +1,101 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * A program to verify that the chunk indexing type of a dataset in a file + * is version 1 B-tree. + * This is to support the testing of the tool "h5format_convert". + */ + +#include "hdf5.h" +#include "H5private.h" +#include "h5tools.h" + +static void usage(void); + +static void +usage(void) +{ + HDfprintf(stdout, "Usage: h5fc_chk_idx file_name dataset_pathname\n"); +} /* usage() */ + +/*------------------------------------------------------------------------- + * Function: main + * + * Purpose: To check that the chunk indexing type for the dataset in + * the file is version 1 B-tree. 
+ * + * Return: 0 -- the indexing type is version 1 B-tree + * 1 -- otherwise + * + *------------------------------------------------------------------------- + */ +int +main(int argc, char *argv[]) +{ + char *fname = NULL; + char *dname = NULL; + hid_t fid = -1; + hid_t did = -1; + H5D_chunk_index_t idx_type; + + /* h5fc_chk_idx fname dname */ + if(argc != 3) { + usage(); + exit(EXIT_FAILURE); + } + + /* Duplicate the file name & dataset name */ + fname = strdup(argv[1]); + dname = strdup(argv[2]); + + /* Try opening the file */ + if((fid = h5tools_fopen(fname, H5F_ACC_RDONLY, H5P_DEFAULT, NULL, NULL, (size_t)0)) < 0) { + HDfprintf(stderr, "h5fc_chk_idx: unable to open the file\n"); + return EXIT_FAILURE; + } + + /* Open the dataset */ + if((did = H5Dopen2(fid, dname, H5P_DEFAULT)) < 0) { + HDfprintf(stderr, "h5fc_chk_idx: unable to open the dataset\n"); + exit(EXIT_FAILURE); + } + + /* Get the dataset's chunk indexing type */ + if(H5Dget_chunk_index_type(did, &idx_type) < 0) { + HDfprintf(stderr, "h5fc_chk_idx: unable to get chunk index type for the dataset\n"); + exit(EXIT_FAILURE); + } + + /* Close the dataset */ + if(H5Dclose(did) < 0) { + HDfprintf(stderr, "h5fc_chk_idx: unable to close the dataset\n"); + exit(EXIT_FAILURE); + } + + /* Close the file */ + if(H5Fclose(fid) < 0) { + HDfprintf(stderr, "h5fc_chk_idx_type: cannot close the file\n"); + return EXIT_FAILURE; + } + + /* Return success when the chunk indexing type is version 1 B-tree */ + if(idx_type == H5D_CHUNK_IDX_BTREE) + return(EXIT_SUCCESS); + else { + HDfprintf(stderr, "Error: chunk indexing type is %d\n", idx_type); + return(EXIT_FAILURE); + } +} /* main() */ diff --git a/tools/h5format_convert/h5fc_gentest.c b/tools/h5format_convert/h5fc_gentest.c new file mode 100644 index 0000000..4dcc286 --- /dev/null +++ b/tools/h5format_convert/h5fc_gentest.c @@ -0,0 +1,635 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. 
* + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * Generate the binary hdf5 files for the h5format_convert tests. + * Usage: just execute the program without any arguments will + * generate all the binary hdf5 files + * + * If you regenerate the test files (e.g., changing some code, + * trying it on a new platform, ...), you need to verify the correctness + * of the expected output and update the corresponding *.ddl files. 
+ */ +#include "hdf5.h" + +#define GROUP "GROUP" + +#define OLD_V1_FILE "h5fc_v1.h5" +#define DSET_NON_CHUNKED "DSET_NON_CHUNKED" +#define DSET_BT1 "DSET_BT1" +#define DSET_NDATA_BT1 "DSET_NDATA_BT1" + +#define LATEST_V3_FILE "h5fc_latest_v3.h5" +#define DSET_EA "DSET_EA" +#define DSET_NDATA_EA "DSET_NDATA_EA" +#define DSET_BT2 "DSET_BT2" +#define DSET_NDATA_BT2 "DSET_NDATA_BT2" +#define DSET_FA "DSET_FA" +#define DSET_NDATA_FA "DSET_NDATA_FA" +#define DSET_NONE "DSET_NONE" +#define DSET_NDATA_NONE "DSET_NDATA_NONE" + +#define NON_V3_FILE "h5fc_non_v3.h5" + +#define EDGE_V3_FILE "h5fc_edge_v3.h5" +#define DSET_EDGE "DSET_EDGE" + +/* + * Function: gen_old() + * + * Create an old format file with: + * 1) 1 non-chunked dataset + * 2) 2 chunked datasets with version 1 B-tree chunk indexing type: with/without data + */ +static void +gen_old(const char *fname) +{ + hid_t fid = -1; /* file id */ + hid_t fcpl = -1; + hid_t gid = -1; /* group id */ + hid_t sid = -1; /* space id */ + hid_t dcpl = -1; /* dataset creation property id */ + hid_t did1 = -1, did2 = -1; /* dataset id */ + hsize_t dims1[1] = {10}; /* dataset dimension */ + hsize_t dims2[2] = {4, 6}; /* dataset dimension */ + hsize_t c_dims[2] = {2, 3}; /* chunk dimension */ + int i; /* local index variable */ + int buf[24]; /* data buffer */ + + if((fcpl = H5Pcreate(H5P_FILE_CREATE)) < 0) + goto error; + + if(H5Pset_istore_k(fcpl, 64) < 0) + goto error; + + /* Create file */ + if((fid = H5Fcreate(fname, H5F_ACC_TRUNC, fcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Create a group */ + if((gid = H5Gcreate2(fid, GROUP, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) + goto error; + + + /* + * Create a non-chunked dataset + */ + + /* Create dataspace */ + if((sid = H5Screate_simple(1, dims1, NULL)) < 0) + goto error; + + /* Create the dataset */ + if((did1 = H5Dcreate2(fid, DSET_NON_CHUNKED, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto 
error; + if(H5Dclose(did1) < 0) + goto error; + + /* + * Create two chunked datasets with version 1 B-tree chunk indexing type + * (one with data, one without data) + */ + + /* Create data */ + for(i = 0; i < 24; i++) + buf[i] = i; + + /* Set chunk */ + if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) + goto error; + if(H5Pset_chunk(dcpl, 2, c_dims) < 0) + goto error; + + /* Create dataspace */ + if((sid = H5Screate_simple(2, dims2, NULL)) < 0) + goto error; + + /* Create the 2 datasets */ + if((did1 = H5Dcreate2(fid, DSET_NDATA_BT1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + if((did2 = H5Dcreate2(gid, DSET_BT1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Write to one dataset */ + if(H5Dwrite(did2, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Pclose(dcpl) < 0) + goto error; + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + if(H5Dclose(did2) < 0) + goto error; + + if(H5Gclose(gid) < 0) + goto error; + if(H5Fclose(fid) < 0) + goto error; + +error: + H5E_BEGIN_TRY { + H5Pclose(dcpl); + H5Sclose(sid); + H5Dclose(did1); + H5Dclose(did2); + H5Gclose(gid); + H5Fclose(fid); + } H5E_END_TRY; + +} /* gen_old() */ + +/* + * Function: gen_latest() + * + * Create a file with write+latest-format--this will result in v3 superblock+latest version support: + * 1) 2 chunked datasets with extensible array chunk indexing type (with/without data) + * 2) 2 chunked datasets with version 2 B-tree chunk indexing type (with/without data) + * 3) 2 chunked datasets with fixed array chunk indexing type (with/without data) + * 4) 2 chunked datasets with implicit array chunk indexing type (with/without data) + */ +static void +gen_latest(const char *fname) +{ + hid_t fid = -1; /* file id */ + hid_t fapl = -1; /* file access property list */ + hid_t gid = -1; /* group id */ + hid_t sid = -1; /* space id */ + hid_t dcpl = -1; /* dataset creation 
property id */ + hid_t did1 = -1, did2 = -1; /* dataset id */ + hsize_t dims2[2] = {4, 6}; /* dataset dimension */ + hsize_t max_dims[2]; /* maximum dataset dimension */ + hsize_t c_dims[2] = {2, 3}; /* chunk dimension */ + int i; /* local index variable */ + int buf[24]; /* data buffer */ + + /* Create a new format file */ + if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0) + goto error; + if(H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) + goto error; + + if((fid = H5Fcreate(fname, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) + goto error; + + /* Create a group */ + if((gid = H5Gcreate2(fid, GROUP, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) + goto error; + + /* Set chunk */ + if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) + goto error; + if(H5Pset_chunk(dcpl, 2, c_dims) < 0) + goto error; + + /* + * Create 2 chunked datasets with extensible array chunk indexing type + * (one with data; one without data) + */ + + /* Create dataspace */ + max_dims[0] = 10; + max_dims[1] = H5S_UNLIMITED; + if((sid = H5Screate_simple(2, dims2, max_dims)) < 0) + goto error; + + /* Create the 2 datasets */ + if((did1 = H5Dcreate2(gid, DSET_NDATA_EA, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + if((did2 = H5Dcreate2(fid, DSET_EA, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Create data */ + for(i = 0; i < 24; i++) + buf[i] = i; + + /* Write to one dataset */ + if(H5Dwrite(did2, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + if(H5Dclose(did2) < 0) + goto error; + + + /* + * Create 2 chunked datasets with version 2 B-tree chunk indexing type + * (one with data; one without data) + */ + + /* Create dataspace */ + max_dims[0] = 10; + max_dims[0] = H5S_UNLIMITED; + max_dims[1] = H5S_UNLIMITED; + if((sid = H5Screate_simple(2, dims2, max_dims)) < 0) + goto error; + + /* Create the 2 datasets */ 
+ if((did1 = H5Dcreate2(fid, DSET_NDATA_BT2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + if((did2 = H5Dcreate2(gid, DSET_BT2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Write to one dataset */ + if(H5Dwrite(did2, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + if(H5Dclose(did2) < 0) + goto error; + + /* + * Create 2 chunked datasets with fixed array chunk indexing type + * (one with data; one without data) + */ + + /* Create dataspace */ + max_dims[0] = 20; + max_dims[1] = 10; + if((sid = H5Screate_simple(2, dims2, max_dims)) < 0) + goto error; + + /* Create the datasets */ + if((did1 = H5Dcreate2(fid, DSET_FA, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + if((did2 = H5Dcreate2(gid, DSET_NDATA_FA, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Write to the dataset */ + if(H5Dwrite(did1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + if(H5Dclose(did2) < 0) + goto error; + + + /* + * Create 2 chunked datasets with implicit chunk indexing type + * (one with data; one without data) + */ + + /* Create dataspace */ + if((sid = H5Screate_simple(2, dims2, NULL)) < 0) + goto error; + + /* Set early allocation */ + if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0) + goto error; + + /* Create the 2 datasets */ + if((did1 = H5Dcreate2(fid, DSET_NONE, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + if((did2 = H5Dcreate2(gid, DSET_NDATA_NONE, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Write to one dataset */ + if(H5Dwrite(did1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Dclose(did1) < 0) + 
goto error; + if(H5Dclose(did2) < 0) + goto error; + if(H5Sclose(sid) < 0) + goto error; + + if(H5Pclose(dcpl) < 0) + goto error; + if(H5Gclose(gid) < 0) + goto error; + if(H5Fclose(fid) < 0) + goto error; + +error: + H5E_BEGIN_TRY { + H5Pclose(dcpl); + H5Sclose(sid); + H5Dclose(did1); + H5Dclose(did2); + H5Gclose(gid); + H5Fclose(fid); + H5Pclose(fapl); + } H5E_END_TRY; + +} /* gen_latest() */ + +/* + * Function: gen_non() + * + * Create a file with SWMR write+non-latest-format--this will result in v3 superbock+latest version support: + * 1) 1 chunked dataset with extensible array chunk indexing type (without data) + * 2) 1 chunked dataset with version 2 B-tree chunk indexing type (with data) + * Re-open the file with write+non-latest-format and create: + * 3) 1 chunked dataset with version 2 B-tree chunk indexing type (without data) + * 4) 1 chunked dataset with extensible array indexing type (with data) + * 5) 1 non-chunked dataset + */ +static void +gen_non(const char *fname) +{ + hid_t fid = -1; /* file id */ + hid_t gid = -1; /* group id */ + hid_t sid = -1; /* space id */ + hid_t dcpl = -1; /* dataset creation property id */ + hid_t did1 = -1, did2 = -1; /* dataset id */ + hsize_t dims1[1] = {10}; /* dataset dimension */ + hsize_t dims2[2] = {4, 6}; /* dataset dimension */ + hsize_t max_dims[2]; /* maximum dataset dimension */ + hsize_t c_dims[2] = {2, 3}; /* chunk dimension */ + int i; /* local index variable */ + int buf[24]; /* data buffer */ + + /* Create a new file with SWMR_WRITE + non-latest-format */ + if((fid = H5Fcreate(fname, H5F_ACC_TRUNC|H5F_ACC_SWMR_WRITE, H5P_DEFAULT, H5P_DEFAULT)) < 0) + goto error; + + /* Create a group */ + if((gid = H5Gcreate2(fid, GROUP, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) + goto error; + + /* Create data */ + for(i = 0; i < 24; i++) + buf[i] = i; + + /* Set chunk */ + if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) + goto error; + if(H5Pset_chunk(dcpl, 2, c_dims) < 0) + goto error; + + /* + * Create a chunked 
dataset with extensible array chunk indexing type (without data) + */ + + /* Create dataspace */ + max_dims[0] = 10; + max_dims[1] = H5S_UNLIMITED; + if((sid = H5Screate_simple(2, dims2, max_dims)) < 0) + goto error; + + /* Create the dataset */ + if((did1 = H5Dcreate2(fid, DSET_NDATA_EA, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + + /* + * Create a chunked dataset with version 2 B-tree chunk indexing type (with data) + */ + + /* Create dataspace */ + max_dims[0] = 10; + max_dims[0] = H5S_UNLIMITED; + max_dims[1] = H5S_UNLIMITED; + if((sid = H5Screate_simple(2, dims2, max_dims)) < 0) + goto error; + + /* Create the dataset */ + if((did1 = H5Dcreate2(gid, DSET_BT2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Write to the dataset */ + if(H5Dwrite(did1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + if(H5Pclose(dcpl) < 0) + goto error; + if(H5Gclose(gid) < 0) + goto error; + if(H5Fclose(fid) < 0) + goto error; + + /* Re-open the file with old format */ + if((fid = H5Fopen(fname, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) + goto error; + + /* Open the group */ + if((gid = H5Gopen2(fid, GROUP, H5P_DEFAULT)) < 0) + goto error; + + /* + * Create a dataset with version 2 B-btree chunk indexing type (without data) + */ + + /* Set chunk */ + if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) + goto error; + if(H5Pset_chunk(dcpl, 2, c_dims) < 0) + goto error; + + /* Create dataspace */ + max_dims[0] = H5S_UNLIMITED; + max_dims[1] = H5S_UNLIMITED; + if((sid = H5Screate_simple(2, dims2, max_dims)) < 0) + goto error; + + /* Create the dataset */ + if((did1 = H5Dcreate2(fid, DSET_NDATA_BT2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Close the dataspace */ + if(H5Sclose(sid) < 0) 
+ goto error; + + /* + * Create a dataset with version extensible array chunk indexing type (with data) in the group + */ + + /* Create dataspace */ + max_dims[0] = 10; + max_dims[1] = H5S_UNLIMITED; + if((sid = H5Screate_simple(2, dims2, max_dims)) < 0) + goto error; + + /* Create the dataset */ + if((did2 = H5Dcreate2(gid, DSET_EA, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Write to the dataset */ + if(H5Dwrite(did2, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + if(H5Dclose(did2) < 0) + goto error; + + /* + * Create a non-chunked dataset in the group + */ + + /* Create dataspace */ + if((sid = H5Screate_simple(1, dims1, NULL)) < 0) + goto error; + + /* Create the dataset */ + if((did1 = H5Dcreate2(gid, DSET_NON_CHUNKED, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) + goto error; + + /* Closing */ + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did1) < 0) + goto error; + + if(H5Gclose(gid) < 0) + goto error; + if(H5Fclose(fid) < 0) + goto error; + if(H5Pclose(dcpl) < 0) + goto error; + +error: + H5E_BEGIN_TRY { + H5Pclose(dcpl); + H5Sclose(sid); + H5Dclose(did1); + H5Dclose(did2); + H5Gclose(gid); + H5Fclose(fid); + } H5E_END_TRY; + +} /* gen_non() */ + +/* + * Function: gen_edge() + * + * Create a file with write+latest-format--this will result in v3 superblock+latest version support: + * A dataset: chunked, filtered, H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS enabled + * (i.e. 
the dataset does not filter partial edge chunks) + */ +static void +gen_edge(const char *fname) +{ + hid_t fid = -1; /* file id */ + hid_t fapl = -1; /* file access property list */ + hid_t sid = -1; /* dataspace id */ + hid_t dcpl = -1; /* dataset creation property id */ + hid_t did = -1; /* dataset id */ + hsize_t dims2[2] = {12, 6}; /* Dataset dimensions */ + hsize_t c_dims[2] = {5, 5}; /* Chunk dimensions */ + float buf[12][6]; /* Buffer for writing data */ + int i, j; /* local index variable */ + + /* Create a new format file */ + if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0) + goto error; + if(H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) + goto error; + if((fid = H5Fcreate(fname, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) + goto error; + + /* Set chunk, filter, no-filter-edge-chunk */ + if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) + goto error; + if(H5Pset_chunk(dcpl, 2, c_dims) < 0) + goto error; + if(H5Pset_deflate(dcpl, 9) < 0) + goto error; + if(H5Pset_chunk_opts(dcpl, H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS) < 0) + goto error; + + /* Create dataspace */ + if((sid = H5Screate_simple(2, dims2, NULL)) < 0) + goto error; + + /* Create the dataset */ + if((did = H5Dcreate2(fid, DSET_EDGE, H5T_NATIVE_FLOAT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) + goto error; + + /* Create data */ + for (i = 0; i< 12; i++) + for (j = 0; j< 6; j++) + buf[i][j] = 100.0F; + + /* Write to the dataset */ + if(H5Dwrite(did, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) + goto error; + + /* Closing */ + if(H5Pclose(dcpl) < 0) + goto error; + if(H5Sclose(sid) < 0) + goto error; + if(H5Dclose(did) < 0) + goto error; + if(H5Pclose(fapl) < 0) + goto error; + if(H5Fclose(fid) < 0) + goto error; + +error: + H5E_BEGIN_TRY { + H5Pclose(dcpl); + H5Sclose(sid); + H5Dclose(did); + H5Fclose(fid); + H5Pclose(fapl); + } H5E_END_TRY; + +} /* gen_edge() */ + +int main(void) +{ + /* Generate an old format file with v1 superbock */ + gen_old(OLD_V1_FILE); + + /* 
Generate a latest-format file with v3 superblock */ + gen_latest(LATEST_V3_FILE); + + /* Generate a non-latest-format file with v3 superblock */ + gen_non(NON_V3_FILE); + + /* Generate a new format file with a no-filter-edge-chunk dataset for testing */ + gen_edge(EDGE_V3_FILE); + return 0; +} diff --git a/tools/h5format_convert/h5format_convert.c b/tools/h5format_convert/h5format_convert.c new file mode 100644 index 0000000..7686acc --- /dev/null +++ b/tools/h5format_convert/h5format_convert.c @@ -0,0 +1,438 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the files COPYING and Copyright.html. COPYING can be found at the root * + * of the source code distribution tree; Copyright.html can be found at the * + * root level of an installed copy of the electronic HDF5 document set and * + * is linked from the top-level documents page. It can also be found at * + * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * + * access to either file, you may request a copy from help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +/* + * Programmer: Vailin Choi; Feb 2015 + */ + + +/* + * We include the private header file so we can get to the uniform + * programming environment it declares. 
+ * HDF5 API functions (except for H5G_basename()) + */ +#include "H5private.h" +#include "h5tools.h" +#include "h5tools_utils.h" +#include "h5trav.h" + +/* Name of tool */ +#define PROGRAMNAME "h5format_convert" + +static char *fname_g = NULL; +static char *dname_g = NULL; +static int dset_g = FALSE; +static int noop_g = FALSE; +static int verbose_g = 0; + +/* + * Command-line options: The user can specify short or long-named + * parameters. + */ +static const char *s_opts = "hVvd:n"; +static struct long_options l_opts[] = { + { "help", no_arg, 'h' }, + { "hel", no_arg, 'h'}, + { "he", no_arg, 'h'}, + { "version", no_arg, 'V' }, + { "version", no_arg, 'V' }, + { "versio", no_arg, 'V' }, + { "versi", no_arg, 'V' }, + { "vers", no_arg, 'V' }, + { "verbose", no_arg, 'v' }, + { "verbos", no_arg, 'v' }, + { "verbo", no_arg, 'v' }, + { "verb", no_arg, 'v' }, + { "dname", require_arg, 'd' }, + { "dnam", require_arg, 'd' }, + { "dna", require_arg, 'd' }, + { "dn", require_arg, 'd' }, + { "noop", no_arg, 'n' }, + { "noo", no_arg, 'n' }, + { "no", no_arg, 'n' }, + { NULL, 0, '\0' } +}; + + +/*------------------------------------------------------------------------- + * Function: usage + * + * Purpose: print usage + * + * Return: void + * + *------------------------------------------------------------------------- + */ +static void usage(const char *prog) +{ + printf("usage: %s [OPTIONS] file_name\n", prog); + printf(" OPTIONS\n"); + printf(" -h, --help Print a usage message and exit\n"); + printf(" -V, --version Print version number and exit\n"); + printf(" -v, --verbose Turn on verbose mode\n"); + printf(" -d dname, --dname=dataset_name Pathname for the dataset\n"); + printf(" -n, --noop Perform all the steps except the actual conversion\n"); + printf("\n"); + printf("Examples of use:\n"); + printf("\n"); + printf("h5format_convert -d /group/dataset file_name\n"); + printf(" Convert the chunk indexing type to version 1 B-tree\n"); + printf(" for the chunked dataset 
</group/dataset> in the HDF5 file <file_name>.\n"); + printf("\n"); + printf("h5format_convert file_name\n"); + printf(" Convert the chunk indexing type to version 1 B-tree\n"); + printf(" for all the chunked datasets in the HDF5 file <file_name>.\n"); + printf("\n"); + printf("h5format_convert -n -d /group/dataset file_name\n"); + printf(" Go through all the steps except the actual conversion when \n"); + printf(" converting the chunked dataset </group/dataset> in the HDF5 file <file_name>.\n"); +} /* usage() */ + +/*------------------------------------------------------------------------- + * Function: parse_command_line + * + * Purpose: parse command line input + * + * Return: Success: 0 + * Failure: 1 + * + *------------------------------------------------------------------------- + */ +static int +parse_command_line(int argc, const char **argv) +{ + int opt; + + /* no arguments */ + if (argc == 1) { + usage(h5tools_getprogname()); + h5tools_setstatus(EXIT_FAILURE); + goto error; + } + + /* parse command line options */ + while ((opt = get_option(argc, argv, s_opts, l_opts)) != EOF) { + switch((char) opt) { + case 'h': + usage(h5tools_getprogname()); + h5tools_setstatus(EXIT_SUCCESS); + goto error; + + case 'V': + print_version(h5tools_getprogname()); + h5tools_setstatus(EXIT_SUCCESS); + goto error; + + case 'v': + verbose_g = TRUE; + break; + + case 'd': /* -d dname */ + if(opt_arg != NULL && *opt_arg) + /* if(opt_arg != NULL)*/ + dname_g = HDstrdup(opt_arg); + if(dname_g == NULL) { + h5tools_setstatus(EXIT_FAILURE); + error_msg("No dataset name\n", opt_arg); + usage(h5tools_getprogname()); + goto error; + } + dset_g = TRUE; + break; + + case 'n': /* -n */ + noop_g = TRUE; + break; + + default: + h5tools_setstatus(EXIT_FAILURE); + usage(h5tools_getprogname()); + goto error; + break; + } /* switch */ + } /* while */ + + if (argc <= opt_ind) { + error_msg("missing file name\n"); + usage(h5tools_getprogname()); + h5tools_setstatus(EXIT_FAILURE); + goto error; + } 
+ + fname_g = HDstrdup(argv[opt_ind]); + + return(0); + +error: + return(-1); ; +} /* parse_command_line() */ + + +/*------------------------------------------------------------------------- + * Function: leave + * + * Purpose: Close HDF5 + * + * Return: Does not return + * + *------------------------------------------------------------------------- + */ +static void +leave(int ret) +{ + h5tools_close(); + + HDexit(ret); +} /* leave() */ + +/*------------------------------------------------------------------------- + * Function: convert() + * + * Purpose: To change the chunk indexing type for the dataset to version 1 B-tree. + * -- the dataset has to be chunked + * -- the dataset's chunk indexing type is not already version 1 B-tree. + * If the above conditions are not fulfilled, the tool will not perform + * the conversion but will exit with success. + * + * Return: Success: 0 + * Failure: 1 + * + *------------------------------------------------------------------------- + */ +static int +convert(hid_t fid, const char *dname) +{ + hid_t dcpl = -1; + hid_t did = -1; + H5D_layout_t layout_type; + H5D_chunk_index_t idx_type; + + /* Open the dataset */ + if((did = H5Dopen2(fid, dname, H5P_DEFAULT)) < 0) { + error_msg("unable to open dataset \"%s\"\n", dname); + h5tools_setstatus(EXIT_FAILURE); + goto error; + + } else if(verbose_g) + printf("Open the dataset\n"); + + /* Get the dataset's creation property list */ + if((dcpl = H5Dget_create_plist(did)) < 0) { + error_msg("unable to get the dataset creation property list\n"); + h5tools_setstatus(EXIT_FAILURE); + goto error; + } + + /* Get the dataset's layout */ + if((layout_type = H5Pget_layout(dcpl)) < 0) { + error_msg("unable to get the dataset layout type\n"); + h5tools_setstatus(EXIT_FAILURE); + goto error; + + } else if(verbose_g) + printf("Retrieve the dataset's layout\n"); + + /* No further action if not a chunked dataset */ + if(layout_type != H5D_CHUNKED) { + if(verbose_g) + printf("Dataset is not chunked: no 
further action\n"); + h5tools_setstatus(EXIT_SUCCESS); + goto done; + + } else if(verbose_g) + printf("Verify the dataset is a chunked dataset\n"); + + /* Get the dataset's chunk indexing type */ + if(H5Dget_chunk_index_type(did, &idx_type) < 0) { + error_msg("unable to get the chunk indexing type for \"%s\"\n", dname); + h5tools_setstatus(EXIT_FAILURE); + goto error; + + } else if(verbose_g) + printf("Retrieve the dataset's chunk indexing type\n"); + + /* No further action if the chunk indexing type is already version 1 B-tree */ + if(idx_type == H5D_CHUNK_IDX_BTREE) { + if(verbose_g) + printf("Chunk indexing type is already version 1 B-tree: no further action\n"); + h5tools_setstatus(EXIT_SUCCESS); + goto done; + + } else if (verbose_g) + printf("Verify the dataset's chunk indexing type is not version 1 B-tree\n"); + + /* No further action if it is a noop */ + if(noop_g) { + if(verbose_g) + printf("Not converting the dataset\n"); + h5tools_setstatus(EXIT_SUCCESS); + goto done; + } + + if(verbose_g) + printf("Converting the dataset...\n"); + + /* Convert the dataset's chunk indexing type to version 1 B-tree */ + if(H5Dformat_convert(did) < 0) { + error_msg("unable to convert chunk indexing for \"%s\"\n", dname); + h5tools_setstatus(EXIT_FAILURE); + goto error; + + } else if(verbose_g) + printf("Done\n"); + +done: + /* Close the dataset */ + if(H5Dclose(did) < 0) { + error_msg("unable to close dataset \"%s\"\n", dname); + h5tools_setstatus(EXIT_FAILURE); + goto error; + } else if(verbose_g) + printf("Close the dataset\n"); + + /* Close the dataset creation property list */ + if(H5Pclose(dcpl) < 0) { + error_msg("unable to close dataset creation property list\n"); + h5tools_setstatus(EXIT_FAILURE); + goto error; + } else if(verbose_g) + printf("Close the dataset creation property list\n"); + + return(0); + +error: + if(verbose_g) + printf("Error encountered\n"); + + H5E_BEGIN_TRY { + H5Pclose(dcpl); + H5Dclose(did); + } H5E_END_TRY; + + return(-1); + +} /* convert() 
*/ + +/*------------------------------------------------------------------------- + * Function: convert_dsets_cb() + * + * Purpose: The callback routine from the traversal to convert the + * chunk indexing type of the dataset object. + * + * Return: Success: 0 + * Failure: 1 + *------------------------------------------------------------------------- + */ +static int +convert_dsets_cb(const char *path, const H5O_info_t *oi, const char *already_visited, void *_fid) +{ + hid_t fid = *(hid_t *)_fid; + + /* If the object has already been seen then just return */ + if(NULL == already_visited) { + + if(oi->type == H5O_TYPE_DATASET) { + if(verbose_g) + printf("Going to process dataset:%s...\n", path); + if(convert(fid, path) < 0) + goto error; + } + + } /* end if */ + + return 0; + +error: + return -1; + +} /* end convert_dsets_cb() */ + + +/*------------------------------------------------------------------------- + * Function: main + * + * Purpose: To convert the chunk indexing type of a dataset in a file to + * version 1 B-tree. 
+ * + * Return: Success: 0 + * Failure: 1 + * + *------------------------------------------------------------------------- + */ +int +main(int argc, const char *argv[]) +{ + H5E_auto2_t func; + void *edata; + hid_t fid = -1; + + h5tools_setprogname(PROGRAMNAME); + h5tools_setstatus(EXIT_SUCCESS); + + /* Disable error reporting */ + H5Eget_auto2(H5E_DEFAULT, &func, &edata); + H5Eset_auto2(H5E_DEFAULT, NULL, NULL); + + /* Initialize h5tools lib */ + h5tools_init(); + + /* Parse command line options */ + if(parse_command_line(argc, argv) < 0) + goto done; + else if(verbose_g) + printf("Process command line options\n"); + + if(noop_g && verbose_g) + printf("It is noop...\n"); + + /* Open the HDF5 file */ + if((fid = h5tools_fopen(fname_g, H5F_ACC_RDWR, H5P_DEFAULT, NULL, NULL, 0)) < 0) { + error_msg("unable to open file \"%s\"\n", fname_g); + h5tools_setstatus(EXIT_FAILURE); + goto done; + } else if(verbose_g) + printf("Open the file %s\n", fname_g); + + if(dset_g) { /* Convert a specified dataset in the file */ + if(verbose_g) + printf("Going to process dataset: %s...\n", dname_g); + if(convert(fid, dname_g) < 0) + goto done; + } else { /* Convert all datasets in the file */ + if(verbose_g) + printf("Processing all datasets in the file...\n"); + if(h5trav_visit(fid, "/", TRUE, TRUE, convert_dsets_cb, NULL, &fid) < 0) + goto done; + } + +done: + /* Close the file */ + if(fid >= 0) { + if(H5Fclose(fid) < 0) { + error_msg("unable to close file \"%s\"\n", fname_g); + h5tools_setstatus(EXIT_FAILURE); + } else if(verbose_g) + printf("Close the file\n"); + } + + if(fname_g) + HDfree(fname_g); + if(dname_g) + HDfree(dname_g); + + H5Eset_auto2(H5E_DEFAULT, func, edata); + leave(h5tools_getstatus()); + +} /* end main() */ diff --git a/tools/h5format_convert/testfiles/h5fc_d_file.ddl b/tools/h5format_convert/testfiles/h5fc_d_file.ddl new file mode 100644 index 0000000..3641a4f --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_d_file.ddl @@ -0,0 +1,22 @@ +usage: 
h5format_convert [OPTIONS] file_name + OPTIONS + -h, --help Print a usage message and exit + -V, --version Print version number and exit + -v, --verbose Turn on verbose mode + -d dname, --dname=dataset_name Pathname for the dataset + -n, --noop Perform all the steps except the actual conversion + +Examples of use: + +h5format_convert -d /group/dataset file_name + Convert the chunk indexing type to version 1 B-tree + for the chunked dataset </group/dataset> in the HDF5 file <file_name>. + +h5format_convert file_name + Convert the chunk indexing type to version 1 B-tree + for all the chunked datasets in the HDF5 file <file_name>. + +h5format_convert -n -d /group/dataset file_name + Go through all the steps except the actual conversion when + converting the chunked dataset </group/dataset> in the HDF5 file <file_name>. +h5format_convert error: missing file name diff --git a/tools/h5format_convert/testfiles/h5fc_dname.ddl b/tools/h5format_convert/testfiles/h5fc_dname.ddl new file mode 100644 index 0000000..c391764 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_dname.ddl @@ -0,0 +1,22 @@ +usage: h5format_convert [OPTIONS] file_name + OPTIONS + -h, --help Print a usage message and exit + -V, --version Print version number and exit + -v, --verbose Turn on verbose mode + -d dname, --dname=dataset_name Pathname for the dataset + -n, --noop Perform all the steps except the actual conversion + +Examples of use: + +h5format_convert -d /group/dataset file_name + Convert the chunk indexing type to version 1 B-tree + for the chunked dataset </group/dataset> in the HDF5 file <file_name>. + +h5format_convert file_name + Convert the chunk indexing type to version 1 B-tree + for all the chunked datasets in the HDF5 file <file_name>. + +h5format_convert -n -d /group/dataset file_name + Go through all the steps except the actual conversion when + converting the chunked dataset </group/dataset> in the HDF5 file <file_name>. 
+h5format_convert error: No dataset name diff --git a/tools/h5format_convert/testfiles/h5fc_edge_v3.h5 b/tools/h5format_convert/testfiles/h5fc_edge_v3.h5 Binary files differnew file mode 100644 index 0000000..debeda4 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_edge_v3.h5 diff --git a/tools/h5format_convert/testfiles/h5fc_help.ddl b/tools/h5format_convert/testfiles/h5fc_help.ddl new file mode 100644 index 0000000..9081ab8 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_help.ddl @@ -0,0 +1,21 @@ +usage: h5format_convert [OPTIONS] file_name + OPTIONS + -h, --help Print a usage message and exit + -V, --version Print version number and exit + -v, --verbose Turn on verbose mode + -d dname, --dname=dataset_name Pathname for the dataset + -n, --noop Perform all the steps except the actual conversion + +Examples of use: + +h5format_convert -d /group/dataset file_name + Convert the chunk indexing type to version 1 B-tree + for the chunked dataset </group/dataset> in the HDF5 file <file_name>. + +h5format_convert file_name + Convert the chunk indexing type to version 1 B-tree + for all the chunked datasets in the HDF5 file <file_name>. + +h5format_convert -n -d /group/dataset file_name + Go through all the steps except the actual conversion when + converting the chunked dataset </group/dataset> in the HDF5 file <file_name>. 
diff --git a/tools/h5format_convert/testfiles/h5fc_latest_v3.h5 b/tools/h5format_convert/testfiles/h5fc_latest_v3.h5 Binary files differnew file mode 100644 index 0000000..f7de743 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_latest_v3.h5 diff --git a/tools/h5format_convert/testfiles/h5fc_non_v3.h5 b/tools/h5format_convert/testfiles/h5fc_non_v3.h5 Binary files differnew file mode 100644 index 0000000..b1bffa8 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_non_v3.h5 diff --git a/tools/h5format_convert/testfiles/h5fc_nonexistdset_file.ddl b/tools/h5format_convert/testfiles/h5fc_nonexistdset_file.ddl new file mode 100644 index 0000000..39450c0 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_nonexistdset_file.ddl @@ -0,0 +1 @@ +h5format_convert error: unable to open dataset "nonexist" diff --git a/tools/h5format_convert/testfiles/h5fc_nonexistfile.ddl b/tools/h5format_convert/testfiles/h5fc_nonexistfile.ddl new file mode 100644 index 0000000..706ea9d --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_nonexistfile.ddl @@ -0,0 +1 @@ +h5format_convert error: unable to open file "nonexist.h5" diff --git a/tools/h5format_convert/testfiles/h5fc_nooption.ddl b/tools/h5format_convert/testfiles/h5fc_nooption.ddl new file mode 100644 index 0000000..9081ab8 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_nooption.ddl @@ -0,0 +1,21 @@ +usage: h5format_convert [OPTIONS] file_name + OPTIONS + -h, --help Print a usage message and exit + -V, --version Print version number and exit + -v, --verbose Turn on verbose mode + -d dname, --dname=dataset_name Pathname for the dataset + -n, --noop Perform all the steps except the actual conversion + +Examples of use: + +h5format_convert -d /group/dataset file_name + Convert the chunk indexing type to version 1 B-tree + for the chunked dataset </group/dataset> in the HDF5 file <file_name>. 
+ +h5format_convert file_name + Convert the chunk indexing type to version 1 B-tree + for all the chunked datasets in the HDF5 file <file_name>. + +h5format_convert -n -d /group/dataset file_name + Go through all the steps except the actual conversion when + converting the chunked dataset </group/dataset> in the HDF5 file <file_name>. diff --git a/tools/h5format_convert/testfiles/h5fc_v1.h5 b/tools/h5format_convert/testfiles/h5fc_v1.h5 Binary files differnew file mode 100644 index 0000000..d3d66f8 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_v1.h5 diff --git a/tools/h5format_convert/testfiles/h5fc_v_all.ddl b/tools/h5format_convert/testfiles/h5fc_v_all.ddl new file mode 100644 index 0000000..3f474fe --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_v_all.ddl @@ -0,0 +1,26 @@ +Process command line options +Open the file tmp.h5 +Processing all datasets in the file... +Going to process dataset:/DSET_NDATA_BT1... +Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Chunk indexing type is already version 1 B-tree: no further action +Close the dataset +Close the dataset creation property list +Going to process dataset:/DSET_NON_CHUNKED... +Open the dataset +Retrieve the dataset's layout +Dataset is not chunked: no further action +Close the dataset +Close the dataset creation property list +Going to process dataset:/GROUP/DSET_BT1... 
+Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Chunk indexing type is already version 1 B-tree: no further action +Close the dataset +Close the dataset creation property list +Close the file diff --git a/tools/h5format_convert/testfiles/h5fc_v_bt1.ddl b/tools/h5format_convert/testfiles/h5fc_v_bt1.ddl new file mode 100644 index 0000000..abb0a89 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_v_bt1.ddl @@ -0,0 +1,11 @@ +Process command line options +Open the file tmp.h5 +Going to process dataset: /GROUP/DSET_BT1... +Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Chunk indexing type is already version 1 B-tree: no further action +Close the dataset +Close the dataset creation property list +Close the file diff --git a/tools/h5format_convert/testfiles/h5fc_v_n_1d.ddl b/tools/h5format_convert/testfiles/h5fc_v_n_1d.ddl new file mode 100644 index 0000000..a26dc66 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_v_n_1d.ddl @@ -0,0 +1,13 @@ +Process command line options +It is noop... +Open the file tmp.h5 +Going to process dataset: /DSET_EA... +Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Verify the dataset's chunk indexing type is not version 1 B-tree +Not converting the dataset +Close the dataset +Close the dataset creation property list +Close the file diff --git a/tools/h5format_convert/testfiles/h5fc_v_n_all.ddl b/tools/h5format_convert/testfiles/h5fc_v_n_all.ddl new file mode 100644 index 0000000..76c70ee --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_v_n_all.ddl @@ -0,0 +1,47 @@ +Process command line options +It is noop... +Open the file tmp.h5 +Processing all datasets in the file... +Going to process dataset:/DSET_NDATA_BT2... 
+Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Verify the dataset's chunk indexing type is not version 1 B-tree +Not converting the dataset +Close the dataset +Close the dataset creation property list +Going to process dataset:/DSET_NDATA_EA... +Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Verify the dataset's chunk indexing type is not version 1 B-tree +Not converting the dataset +Close the dataset +Close the dataset creation property list +Going to process dataset:/GROUP/DSET_BT2... +Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Verify the dataset's chunk indexing type is not version 1 B-tree +Not converting the dataset +Close the dataset +Close the dataset creation property list +Going to process dataset:/GROUP/DSET_EA... +Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Verify the dataset's chunk indexing type is not version 1 B-tree +Not converting the dataset +Close the dataset +Close the dataset creation property list +Going to process dataset:/GROUP/DSET_NON_CHUNKED... +Open the dataset +Retrieve the dataset's layout +Dataset is not chunked: no further action +Close the dataset +Close the dataset creation property list +Close the file diff --git a/tools/h5format_convert/testfiles/h5fc_v_ndata_bt1.ddl b/tools/h5format_convert/testfiles/h5fc_v_ndata_bt1.ddl new file mode 100644 index 0000000..86081f3 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_v_ndata_bt1.ddl @@ -0,0 +1,12 @@ +Process command line options +It is noop... +Open the file tmp.h5 +Going to process dataset: /DSET_NDATA_BT1... 
+Open the dataset +Retrieve the dataset's layout +Verify the dataset is a chunked dataset +Retrieve the dataset's chunk indexing type +Chunk indexing type is already version 1 B-tree: no further action +Close the dataset +Close the dataset creation property list +Close the file diff --git a/tools/h5format_convert/testfiles/h5fc_v_non_chunked.ddl b/tools/h5format_convert/testfiles/h5fc_v_non_chunked.ddl new file mode 100644 index 0000000..baba0e4 --- /dev/null +++ b/tools/h5format_convert/testfiles/h5fc_v_non_chunked.ddl @@ -0,0 +1,9 @@ +Process command line options +Open the file tmp.h5 +Going to process dataset: /DSET_NON_CHUNKED... +Open the dataset +Retrieve the dataset's layout +Dataset is not chunked: no further action +Close the dataset +Close the dataset creation property list +Close the file diff --git a/tools/h5format_convert/testh5fc.sh.in b/tools/h5format_convert/testh5fc.sh.in new file mode 100644 index 0000000..dc5aa48 --- /dev/null +++ b/tools/h5format_convert/testh5fc.sh.in @@ -0,0 +1,400 @@ +#! /bin/sh +# +# Copyright by The HDF Group. +# Copyright by the Board of Trustees of the University of Illinois. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in +# the files COPYING and Copyright.html. COPYING can be found at the root +# of the source code distribution tree; Copyright.html can be found at the +# root level of an installed copy of the electronic HDF5 document set and +# is linked from the top-level documents page. It can also be found at +# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have +# access to either file, you may request a copy from help@hdfgroup.org. 
#
# Tests for the h5format_convert tool
#
#

srcdir=@srcdir@

# Determine which filters are available
USE_FILTER_SZIP="@USE_FILTER_SZIP@"
USE_FILTER_DEFLATE="@USE_FILTER_DEFLATE@"
USE_FILTER_SHUFFLE="@USE_FILTER_SHUFFLE@"
USE_FILTER_FLETCHER32="@USE_FILTER_FLETCHER32@"
USE_FILTER_NBIT="@USE_FILTER_NBIT@"
USE_FILTER_SCALEOFFSET="@USE_FILTER_SCALEOFFSET@"

TESTNAME=h5format_convert
EXIT_SUCCESS=0
EXIT_FAILURE=1

FORMCONV=h5format_convert           # The tool name
FORMCONV_BIN=`pwd`/$FORMCONV        # The path of the tool binary

CHK_IDX=h5fc_chk_idx                # The program name
CHK_IDX_BIN=`pwd`/$CHK_IDX          # The program to verify the chunk indexing type is v1 B-tree

RM='rm -rf'
CMP='cmp -s'
DIFF='diff -c'
CP='cp'
DIRNAME='dirname'
LS='ls'
AWK='awk'

nerrors=0
verbose=yes

# source dirs
SRC_TOOLS="$srcdir/.."
SRC_TOOLS_TESTFILES="$SRC_TOOLS/testfiles"

# testfiles source dirs for tools
SRC_H5LS_TESTFILES="$SRC_TOOLS_TESTFILES"
SRC_H5DUMP_TESTFILES="$SRC_TOOLS_TESTFILES"
SRC_H5DIFF_TESTFILES="$SRC_TOOLS/h5diff/testfiles"
SRC_H5COPY_TESTFILES="$SRC_TOOLS/h5copy/testfiles"
SRC_H5REPACK_TESTFILES="$SRC_TOOLS/h5repack/testfiles"
SRC_H5JAM_TESTFILES="$SRC_TOOLS/h5jam/testfiles"
SRC_H5STAT_TESTFILES="$SRC_TOOLS/h5stat/testfiles"
SRC_H5IMPORT_TESTFILES="$SRC_TOOLS/h5import/testfiles"
SRC_H5FORMCONV_TESTFILES="$SRC_TOOLS/h5format_convert/testfiles"

TESTDIR=./testfiles
test -d $TESTDIR || mkdir $TESTDIR

# Copy the testfile to a temporary file for testing as h5format_convert is changing the file in place
TMPFILE=tmp.h5

######################################################################
# test files
# --------------------------------------------------------------------
# All the test files copy from source directory to test directory
# NOTE: Keep this framework to add/remove test files.
#       Any test files from other tools can be used in this framework.
#       These lists are also used for checking that the files exist.
#       Comment '#' without space can be used.
# --------------------------------------------------------------------
LIST_HDF5_TEST_FILES="
$SRC_H5FORMCONV_TESTFILES/h5fc_v1.h5
$SRC_H5FORMCONV_TESTFILES/h5fc_latest_v3.h5
$SRC_H5FORMCONV_TESTFILES/h5fc_non_v3.h5
$SRC_H5FORMCONV_TESTFILES/h5fc_edge_v3.h5
"

LIST_OTHER_TEST_FILES="
$SRC_H5FORMCONV_TESTFILES/h5fc_help.ddl
$SRC_H5FORMCONV_TESTFILES/h5fc_nooption.ddl
$SRC_H5FORMCONV_TESTFILES/h5fc_nonexistfile.ddl
$SRC_H5FORMCONV_TESTFILES/h5fc_d_file.ddl
$SRC_H5FORMCONV_TESTFILES/h5fc_dname.ddl
$SRC_H5FORMCONV_TESTFILES/h5fc_nonexistdset_file.ddl
$SRC_H5FORMCONV_TESTFILES/h5fc_v_non_chunked.ddl
$SRC_H5FORMCONV_TESTFILES/h5fc_v_bt1.ddl
$SRC_H5FORMCONV_TESTFILES/h5fc_v_ndata_bt1.ddl
$SRC_H5FORMCONV_TESTFILES/h5fc_v_all.ddl
$SRC_H5FORMCONV_TESTFILES/h5fc_v_n_1d.ddl
$SRC_H5FORMCONV_TESTFILES/h5fc_v_n_all.ddl
"

#
# copy test files and expected output files from source dirs to test dir
#
COPY_TESTFILES="$LIST_HDF5_TEST_FILES $LIST_OTHER_TEST_FILES"

COPY_TESTFILES_TO_TESTDIR()
{
    # copy test files. Used -f to make sure get a new copy
    for tstfile in $COPY_TESTFILES
    do
        # ignore '#' comment
        echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
        RET=$?
        if [ $RET -eq 1 ]; then
            # skip cp if srcdir is same as destdir
            # this occurs when build/test performed in source dir and
            # make cp fail
            SDIR=`$DIRNAME $tstfile`
            INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
            INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
            if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
                $CP -f $tstfile $TESTDIR
                if [ $? -ne 0 ]; then
                    echo "Error: FAILED to copy $tstfile ."

                    # Comment out this to CREATE expected file
                    exit $EXIT_FAILURE
                fi
            fi
        fi
    done
}

CLEAN_TESTFILES_AND_TESTDIR()
{
    # skip rm if srcdir is same as destdir
    # this occurs when build/test performed in source dir and
    # make cp fail
    SDIR=`$DIRNAME $tstfile`
    INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
    INODE_DDIR=`$LS -i -d $TESTDIR | $AWK -F' ' '{print $1}'`
    if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
        $RM $TESTDIR
    else
        $RM $TESTDIR/$TMPFILE
    fi
}

# Print a line-line message left justified in a field of 80 characters
# beginning with the word "Testing".
#
TESTING() {
    SPACES="                                                               "
    echo "Testing $* $SPACES" | cut -c1-80 | tr -d '\012'
}

# Run a test and print PASS or *FAIL*. If a test fails then increment
# the `nerrors' global variable and (if $verbose is set) display the
# difference between the actual output and the expected output. The
# expected output is given as the first argument to this function and
# the actual output file is calculated by replacing the `.ddl' with
# `.out'. The actual output is not removed if $HDF5_NOCLEANUP has a
# non-zero value.
#
# $1: expected output
# $2: the test file name
#     --fname might be empty or fname does not exist
#     --fname is copied to a temporary file for testing
# $3 to at most $6--options to the tool such as:
#     -d dname or --dname=dname
#     -v or --verbose
#     -n or --noop
TOOLTEST_OUT() {
    # Prepare expected and actual output
    expect="$TESTDIR/$1"
    actual="$TESTDIR/`basename $1 .ddl`.out"
    actual_err="$TESTDIR/`basename $1 .ddl`.err"
    actual_sav=${actual}-sav
    actual_err_sav=${actual_err}-sav

    # Prepare the test file: work on a scratch copy because the tool
    # converts the file in place
    $RM $TESTDIR/$TMPFILE
    TFILE=$2
    if [ ! -z "$2" ] && [ -e $TESTDIR/$2 ] ; then
        $CP $TESTDIR/$2 $TESTDIR/$TMPFILE
        TFILE=$TMPFILE
    fi

    # Run test.
    TESTING $FORMCONV $3 $4 $5 $6 $2
    (
        cd $TESTDIR
        $RUNSERIAL $FORMCONV_BIN $3 $4 $5 $6 $TFILE
    ) >$actual 2>$actual_err
    cp $actual $actual_sav
    cp $actual_err $actual_err_sav
    cat $actual_err >> $actual

    # Compare output
    if $CMP $expect $actual; then
        echo " PASSED"
    else
        echo "*FAILED*"
        echo "    Expected result (*.ddl) differs from actual result (*.out)"
        nerrors="`expr $nerrors + 1`"
        test yes = "$verbose" && $DIFF $expect $actual |sed 's/^/    /'
    fi

    # Clean up output files (was a redundant duplicate $RM of the first
    # two names; one combined removal suffices)
    if test -z "$HDF5_NOCLEANUP"; then
        $RM $actual $actual_err $actual_sav $actual_err_sav
    fi
}

# To check that the tool exits success, no output
# Assume all short options
# $1 is the test file name
#     --fname exists
#     --fname is copied to a temporary file for testing
# $2 to at most $4--options to the tool such as:
#     -d dname
#     -n
TOOLTEST() {
    TESTING $FORMCONV $2 $3 $4 $1
    $RM $TESTDIR/$TMPFILE
    $CP $TESTDIR/$1 $TESTDIR/$TMPFILE
    $RUNSERIAL $FORMCONV_BIN $2 $3 $4 $TESTDIR/$TMPFILE
    exitcode=$?
    if [ $exitcode -ne 0 ]; then
        echo "*FAILED*"
        echo "    The tool exits failure"
        nerrors="`expr $nerrors + 1`"
    else
        echo " PASSED"
    fi
}

CHECKING() {
    SPACES="                                                               "
    echo "Verifying $* $SPACES" | cut -c1-80 | tr -d '\012'
}

# $1 dataset name
# Assume $TESTDIR/$TMPFILE is the converted test file
IDX_CHECK() {
    CHECKING $1
    $RUNSERIAL $CHK_IDX_BIN $TESTDIR/$TMPFILE $1
    ret=$?
    if [ $ret -eq 0 ]; then
        echo " PASSED"
    else
        echo "*FAILED*"
        echo "    The chunk indexing type is not correct"
        nerrors="`expr $nerrors + 1`"
    fi
}

# Print a "SKIP" message
SKIP() {
    # Fixed: referenced undefined $STAT (copied from the h5stat test script)
    TESTING $FORMCONV $@
    echo " -SKIP-"
}



##############################################################################
##############################################################################
###			  T H E   T E S T S                                ###
##############################################################################
##############################################################################
# prepare for test
COPY_TESTFILES_TO_TESTDIR
#
#
#
# h5format_convert --help
# h5format_convert (no options)
# h5format_convert nonexist.h5  (no options, file does not exist)
TOOLTEST_OUT h5fc_help.ddl '' --help
TOOLTEST_OUT h5fc_nooption.ddl ''
TOOLTEST_OUT h5fc_nonexistfile.ddl nonexist.h5
#
#
# h5format_convert -d h5fc_v1.h5 (just -d option, file exists)
# h5format_convert --dname h5fc_v1.h5 (just --dname option, file exists)
# h5format_convert --dname (just --dname option)
# h5format_convert --dname=nonexist h5fc_v1.h5 (dataset does not exist, file exists)
TOOLTEST_OUT h5fc_d_file.ddl h5fc_v1.h5 -d
TOOLTEST_OUT h5fc_d_file.ddl h5fc_v1.h5 --dname
TOOLTEST_OUT h5fc_dname.ddl '' --dname
TOOLTEST_OUT h5fc_nonexistdset_file.ddl h5fc_v1.h5 --dname=nonexist
#
#
#
# h5format_convert -d /DSET_NON_CHUNKED -v h5fc_v1.h5 (verbose, non-chunked dataset)
# h5format_convert -d /GROUP/DSET_BT1 --verbose h5fc_v1.h5 (verbose, bt1 dataset)
# h5format_convert -d /DSET_NDATA_BT1 -v -n h5fc_v1.h5 (verbose, noop, bt1+nodata dataset)
# h5format_convert -v h5fc_v1.h5 (verbose, all datasets)
TOOLTEST_OUT h5fc_v_non_chunked.ddl h5fc_v1.h5 -d /DSET_NON_CHUNKED -v
TOOLTEST_OUT h5fc_v_bt1.ddl h5fc_v1.h5 -d /GROUP/DSET_BT1 --verbose
TOOLTEST_OUT h5fc_v_ndata_bt1.ddl h5fc_v1.h5 -d /DSET_NDATA_BT1 -v -n
TOOLTEST_OUT h5fc_v_all.ddl h5fc_v1.h5 -v
#
#
#
# h5format_convert -d /DSET_EA -v -n h5fc_latest_v3.h5 (verbose, noop, one ea dataset)
# h5format_convert -v -n h5fc_non_v3.h5 (verbose, noop, all datasets)
TOOLTEST_OUT h5fc_v_n_1d.ddl h5fc_latest_v3.h5 -d /DSET_EA -v -n
TOOLTEST_OUT h5fc_v_n_all.ddl h5fc_non_v3.h5 -v -n
#
#
#
# No output from tests
# 1) Use the tool to convert the dataset
# 2) Verify the chunk indexing type is correct
# h5format_convert -d /DSET_EA h5fc_latest_v3.h5
# h5format_convert -d /GROUP/DSET_NDATA_EA h5fc_latest_v3.h5
# h5format_convert -d /GROUP/DSET_BT2 h5fc_latest_v3.h5
# h5format_convert -d /DSET_NDATA_BT2 h5fc_latest_v3.h5
# h5format_convert -d /DSET_FA h5fc_latest_v3.h5
# h5format_convert -d /GROUP/DSET_NDATA_FA h5fc_latest_v3.h5
# h5format_convert -d /DSET_NONE h5fc_latest_v3.h5
# h5format_convert -d /GROUP/DSET_NDATA_NONE h5fc_latest_v3.h5
TOOLTEST h5fc_latest_v3.h5 -d /DSET_EA
IDX_CHECK /DSET_EA
#
TOOLTEST h5fc_latest_v3.h5 -d /GROUP/DSET_NDATA_EA
IDX_CHECK /GROUP/DSET_NDATA_EA
#
TOOLTEST h5fc_latest_v3.h5 -d /GROUP/DSET_BT2
IDX_CHECK /GROUP/DSET_BT2
#
TOOLTEST h5fc_latest_v3.h5 -d /DSET_NDATA_BT2
IDX_CHECK /DSET_NDATA_BT2
#
TOOLTEST h5fc_latest_v3.h5 -d /DSET_FA
IDX_CHECK /DSET_FA
#
TOOLTEST h5fc_latest_v3.h5 -d /GROUP/DSET_NDATA_FA
IDX_CHECK /GROUP/DSET_NDATA_FA
#
TOOLTEST h5fc_latest_v3.h5 -d /DSET_NONE
IDX_CHECK /DSET_NONE
#
TOOLTEST h5fc_latest_v3.h5 -d /GROUP/DSET_NDATA_NONE
IDX_CHECK /GROUP/DSET_NDATA_NONE
#
#
#
# No output from tests: just check exit code
# h5format_convert -d /DSET_NDATA_BT1 h5fc_v1.h5 (v1-btree dataset)
# h5format_convert -d /GROUP/DSET_NON_CHUNKED h5fc_non_v3.h5 (non-chunked dataset)
TOOLTEST h5fc_v1.h5 -d /DSET_NDATA_BT1
TOOLTEST h5fc_non_v3.h5 -d /GROUP/DSET_NON_CHUNKED
#
#
#
# No output from tests: just check exit code
# h5format_convert -d /GROUP/DSET_BT2 -n h5fc_non_v3.h5 (noop, one dataset)
# h5format_convert -n h5fc_non_v3.h5 (noop, all datasets)
TOOLTEST h5fc_non_v3.h5 -d /GROUP/DSET_BT2 -n
TOOLTEST h5fc_non_v3.h5 -n
#
#
#
# No output from tests: just check exit code
# h5format_convert h5fc_non_v3.h5
# 1) convert all datasets
# 2) verify indexing types
TOOLTEST h5fc_non_v3.h5
IDX_CHECK /DSET_NDATA_EA
IDX_CHECK /DSET_NDATA_BT2
IDX_CHECK /GROUP/DSET_BT2
IDX_CHECK /GROUP/DSET_EA
#
#
#
# No output from test: just check exit code
# h5format_convert h5fc_edge_v3.h5
# 1) convert the chunked dataset (filter, no-filter-edge-chunk)
# 2) verify the indexing type
TOOLTEST h5fc_edge_v3.h5
IDX_CHECK /DSET_EDGE
#
#
#
# Clean up temporary files/directories
CLEAN_TESTFILES_AND_TESTDIR

if test $nerrors -eq 0 ; then
    echo "All $TESTNAME tests passed."
    exit $EXIT_SUCCESS
else
    echo "$TESTNAME tests failed with $nerrors errors."
    exit $EXIT_FAILURE
fi