summary refs log tree commit diff stats
diff options
context:
space:
mode:
author raylu-hdf <60487644+raylu-hdf@users.noreply.github.com> 2021-02-15 17:31:16 (GMT)
committer GitHub <noreply@github.com> 2021-02-15 17:31:16 (GMT)
commit 511b940da6fa60e163f6f94e423f6cbd2879468d (patch)
tree af6ad27dce3bdf0e35f012b5e0784e735e9f25df
parent 85ae0a268729265ab365371cbcb20b1b250791f3 (diff)
parent c20fb7c76d53807ab665c38d8c82cb386f67d2bd (diff)
download hdf5-511b940da6fa60e163f6f94e423f6cbd2879468d.zip
hdf5-511b940da6fa60e163f6f94e423f6cbd2879468d.tar.gz
hdf5-511b940da6fa60e163f6f94e423f6cbd2879468d.tar.bz2
Merge pull request #331 from HDFGroup/raylu_fixed_array
Adding the test case for fixed-array chunked dataset and VDS
-rw-r--r-- src/H5Dfarray.c              2
-rw-r--r-- test/testvfdswmr.sh.in       4
-rw-r--r-- test/vfd_swmr_bigset_writer.c 33
3 files changed, 31 insertions(+), 8 deletions(-)
diff --git a/src/H5Dfarray.c b/src/H5Dfarray.c
index 1417bc2..122fbd1 100644
--- a/src/H5Dfarray.c
+++ b/src/H5Dfarray.c
@@ -162,7 +162,7 @@ const H5D_chunk_ops_t H5D_COPS_FARRAY[1] = {{
H5D__farray_idx_reset, /* reset */
H5D__farray_idx_dump, /* dump */
H5D__farray_idx_dest, /* destroy */
- NULL /* close */
+ H5D__farray_idx_dest /* close (same as destroy) */
}};
diff --git a/test/testvfdswmr.sh.in b/test/testvfdswmr.sh.in
index f50051e..a66ddf4 100644
--- a/test/testvfdswmr.sh.in
+++ b/test/testvfdswmr.sh.in
@@ -624,7 +624,7 @@ if [ ${do_groups:-no} = yes ]; then
rm -f vfd_swmr_group_reader.*.{out,rc}
fi
-for options in "-d 1" "-d 2" "-d 1 -V" "-d 1 -M"; do
+for options in "-d 1" "-d 1 -F" "-d 2" "-d 2 -F" "-d 1 -V" "-d 1 -M" "-d 1 -V -F" "-d 1 -M -F"; do
if [ ${do_many_small:-no} = no ]; then
continue
fi
@@ -673,7 +673,7 @@ for options in "-d 1" "-d 2" "-d 1 -V" "-d 1 -M"; do
rm -f vfd_swmr_bigset_reader.*.{out,rc}
done
-for options in "-d 1" "-d 2" "-d 1 -V" "-d 1 -M"; do
+for options in "-d 1" "-d 1 -F" "-d 2" "-d 2 -F" "-d 1 -V" "-d 1 -M" "-d 1 -V -F" "-d 1 -M -F"; do
#
# Test a few big datasets of one and two dimensions.
#
diff --git a/test/vfd_swmr_bigset_writer.c b/test/vfd_swmr_bigset_writer.c
index d99d8e3..d57479a 100644
--- a/test/vfd_swmr_bigset_writer.c
+++ b/test/vfd_swmr_bigset_writer.c
@@ -16,6 +16,7 @@
*
* 1 the two major indices for extensible, chunked datasets: the
* extensible array and the version-2 B-tree, with VFD SWMR active.
+ * The maximal dimension can be either fixed or unlimited.
*
* 2 reading and writing virtual datasets with source datasets residing
* in the same HDF5 file
@@ -133,6 +134,7 @@ typedef struct {
enum {vds_off, vds_single, vds_multi} vds;
bool use_vfd_swmr;
bool writer;
+ bool fixed_array;
hsize_t chunk_dims[RANK];
hsize_t one_dee_max_dims[RANK];
} state_t;
@@ -159,6 +161,7 @@ state_initializer(void)
, .vds = vds_off
, .use_vfd_swmr = true
, .writer = true
+ , .fixed_array = false
, .one_dee_max_dims = {ROWS, H5S_UNLIMITED}
, .chunk_dims = {ROWS, COLS}
, .update_interval = (struct timespec){
@@ -170,7 +173,7 @@ static void state_init(state_t *, int, char **);
static const hid_t badhid = H5I_INVALID_HID;
-static const hsize_t two_dee_max_dims[RANK] = {H5S_UNLIMITED, H5S_UNLIMITED};
+static hsize_t two_dee_max_dims[RANK];
static uint32_t
matget(const mat_t *mat, unsigned i, unsigned j)
@@ -207,11 +210,12 @@ newmat(unsigned rows, unsigned cols)
static void
usage(const char *progname)
{
- fprintf(stderr, "usage: %s [-S] [-W] [-a steps] [-b] [-c cols]\n"
+ fprintf(stderr, "usage: %s [-F] [-M] [-S] [-V] [-W] [-a steps] [-b] [-c cols]\n"
" [-d dims]\n"
" [-n iterations] [-r rows] [-s datasets]\n"
" [-u milliseconds]\n"
"\n"
+ "-F: fixed maximal dimension for the chunked datasets\n"
"-M: use virtual datasets and many source\n"
" files\n"
"-S: do not use VFD SWMR\n"
@@ -271,8 +275,12 @@ state_init(state_t *s, int argc, char **argv)
esnprintf(tfile, sizeof(tfile), "%s", argv[0]);
esnprintf(s->progname, sizeof(s->progname), "%s", basename(tfile));
- while ((ch = getopt(argc, argv, "MSVWa:bc:d:n:qr:s:u:")) != -1) {
+ while ((ch = getopt(argc, argv, "FMSVWa:bc:d:n:qr:s:u:")) != -1) {
switch (ch) {
+ case 'F':
+ /* The flag to indicate whether the maximal dimension of the chunked datasets is fixed or unlimited */
+ s->fixed_array = true;
+ break;
case 'M':
s->vds = vds_multi;
break;
@@ -370,11 +378,26 @@ state_init(state_t *s, int argc, char **argv)
s->chunk_dims[0] = s->rows;
s->chunk_dims[1] = s->cols;
s->one_dee_max_dims[0] = s->rows;
- s->one_dee_max_dims[1] = H5S_UNLIMITED;
+ if(s->fixed_array) {
+ s->one_dee_max_dims[1] = s->cols * s->nsteps;
+ two_dee_max_dims[0] = s->rows * s->nsteps;
+ two_dee_max_dims[1] = s->cols * s->nsteps;
+ } else {
+ s->one_dee_max_dims[1] = H5S_UNLIMITED;
+ two_dee_max_dims[0] = two_dee_max_dims[1] = H5S_UNLIMITED;
+ }
if (s->vds != vds_off) {
const hsize_t half_chunk_dims[RANK] = {s->rows / 2, s->cols / 2};
- const hsize_t half_max_dims[RANK] = {s->rows / 2, H5S_UNLIMITED};
+ hsize_t half_max_dims[RANK];
+
+ if(s->fixed_array) {
+ half_max_dims[0] = s->rows / 2;
+ half_max_dims[1] = (s->cols * s->nsteps) / 2;
+ } else {
+ half_max_dims[0] = s->rows / 2;
+ half_max_dims[1] = H5S_UNLIMITED;
+ }
if ((s->quadrant_dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
errx(EXIT_FAILURE, "%s.%d: H5Pcreate failed",