author    Allen Byrne <50328838+byrnHDF@users.noreply.github.com>  2024-01-03 20:17:46 (GMT)
committer GitHub <noreply@github.com>                              2024-01-03 20:17:46 (GMT)
commit    772276ea1f6f8b150a22eff3a908eff24ccc50a7 (patch)
tree      29d4f2850422dccf3b4e057786f9766b8e2adc54
parent    8f1a93f1a208055c9b872a06be28a20e72f8f488 (diff)
download  hdf5-772276ea1f6f8b150a22eff3a908eff24ccc50a7.zip
          hdf5-772276ea1f6f8b150a22eff3a908eff24ccc50a7.tar.gz
          hdf5-772276ea1f6f8b150a22eff3a908eff24ccc50a7.tar.bz2
Remove examples that have been moved to HDF5Examples folder (#3917)
-rw-r--r--  HDF5Examples/C/H5PAR/ph5example.c (renamed from examples/ph5example.c) |   0
-rw-r--r--  HDF5Examples/CTestConfig.cmake                                         |  24
-rw-r--r--  examples/CMakeLists.txt                                                |  42
-rw-r--r--  examples/CMakeTests.cmake                                              |  43
-rw-r--r--  examples/Makefile.am                                                   |  33
-rw-r--r--  examples/h5_vds-eiger.c                                                | 179
-rw-r--r--  examples/h5_vds-exc.c                                                  | 217
-rw-r--r--  examples/h5_vds-exclim.c                                               | 214
-rw-r--r--  examples/h5_vds-percival-unlim-maxmin.c                                | 304
-rw-r--r--  examples/h5_vds-percival-unlim.c                                       | 346
-rw-r--r--  examples/h5_vds-percival.c                                             | 241
-rw-r--r--  examples/h5_vds-simpleIO.c                                             | 190
-rw-r--r--  examples/h5_vds.c                                                      | 252
-rw-r--r--  examples/ph5_filtered_writes.c                                         | 488
-rw-r--r--  examples/ph5_filtered_writes_no_sel.c                                  | 369
-rw-r--r--  examples/ph5_subfiling.c                                               | 551
-rw-r--r--  examples/run-c-ex.sh.in                                                |  18
17 files changed, 22 insertions, 3489 deletions
diff --git a/examples/ph5example.c b/HDF5Examples/C/H5PAR/ph5example.c
index 5ec2cdc..5ec2cdc 100644
--- a/examples/ph5example.c
+++ b/HDF5Examples/C/H5PAR/ph5example.c
diff --git a/HDF5Examples/CTestConfig.cmake b/HDF5Examples/CTestConfig.cmake
index 44e26e2..aef6da8 100644
--- a/HDF5Examples/CTestConfig.cmake
+++ b/HDF5Examples/CTestConfig.cmake
@@ -1,18 +1,32 @@
+#
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the COPYING file, which can be found at the root of the source code
+# distribution tree, or in https://www.hdfgroup.org/licenses.
+# If you do not have access to either file, you may request a copy from
+# help@hdfgroup.org.
+#
## This file should be placed in the root directory of your project.
## Then modify the CMakeLists.txt file in the root directory of your
## project to incorporate the testing dashboard.
-## # The following are required to uses Dart and the Cdash dashboard
+## # The following are required to use Dart and the CDash dashboard
## ENABLE_TESTING()
## INCLUDE(CTest)
-set (CTEST_NIGHTLY_START_TIME "18:00:00 CST")
set (CTEST_PROJECT_NAME "HDF5Examples")
+set (CTEST_NIGHTLY_START_TIME "18:00:00 CST")
set (CTEST_DROP_METHOD "https")
-if (CDASH_LOCAL)
- set (CTEST_DROP_SITE "cdash-internal.hdfgroup.org")
- set (CTEST_DROP_LOCATION "/submit.php?project=HDF5Examples")
+if (CTEST_DROP_SITE_INIT)
+ set (CTEST_DROP_SITE "${CTEST_DROP_SITE_INIT}")
else ()
set (CTEST_DROP_SITE "cdash.hdfgroup.org")
+endif ()
+if (CTEST_DROP_LOCATION_INIT)
+ set (CTEST_DROP_LOCATION "${CTEST_DROP_LOCATION_INIT}")
+else ()
set (CTEST_DROP_LOCATION "/submit.php?project=HDF5Examples")
endif ()
set (CTEST_DROP_SITE_CDASH TRUE)
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index 10c6ede..43d7af2 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -32,28 +32,8 @@ set (examples
h5_elink_unix2win
h5_shared_mesg
h5_debug_trace
- h5_vds
- h5_vds-exc
- h5_vds-exclim
- h5_vds-eiger
- h5_vds-simpleIO
- h5_vds-percival
- h5_vds-percival-unlim
- h5_vds-percival-unlim-maxmin
)
-if (H5_HAVE_PARALLEL)
- set (parallel_examples
- ph5example
- ph5_filtered_writes
- ph5_filtered_writes_no_sel
- )
-
- if (HDF5_ENABLE_SUBFILING_VFD)
- list (APPEND parallel_examples ph5_subfiling)
- endif ()
-endif ()
-
foreach (example ${examples})
add_executable (${example} ${HDF5_EXAMPLES_SOURCE_DIR}/${example}.c)
target_include_directories (${example} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
@@ -74,28 +54,6 @@ foreach (example ${examples})
endif ()
endforeach ()
-if (H5_HAVE_PARALLEL)
- foreach (parallel_example ${parallel_examples})
- add_executable (${parallel_example} ${HDF5_EXAMPLES_SOURCE_DIR}/${parallel_example}.c)
- target_include_directories (${parallel_example} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
- if (NOT BUILD_SHARED_LIBS)
- TARGET_C_PROPERTIES (${parallel_example} STATIC)
- target_link_libraries (${parallel_example} PRIVATE ${HDF5_LIB_TARGET} MPI::MPI_C)
- else ()
- TARGET_C_PROPERTIES (${parallel_example} SHARED)
- target_link_libraries (${parallel_example} PRIVATE ${HDF5_LIBSH_TARGET} MPI::MPI_C)
- endif ()
- set_target_properties (${parallel_example} PROPERTIES FOLDER examples)
-
- #-----------------------------------------------------------------------------
- # Add Target to clang-format
- #-----------------------------------------------------------------------------
- if (HDF5_ENABLE_FORMATTERS)
- clang_format (HDF5_EXAMPLES_${parallel_example}_FORMAT ${parallel_example})
- endif ()
- endforeach ()
-endif ()
-
if (BUILD_TESTING AND HDF5_TEST_EXAMPLES)
include (CMakeTests.cmake)
endif ()
diff --git a/examples/CMakeTests.cmake b/examples/CMakeTests.cmake
index 449bc44..09f9060 100644
--- a/examples/CMakeTests.cmake
+++ b/examples/CMakeTests.cmake
@@ -30,8 +30,6 @@ set (test_ex_CLEANFILES
group.h5
groups.h5
hard_link.h5
- h5_subfiling_default_example.h5
- h5_subfiling_custom_example.h5
mount1.h5
mount2.h5
one_index_file.h5
@@ -54,19 +52,6 @@ set (test_ex_CLEANFILES
blue/prefix_target.h5
red/prefix_target.h5
u2w/u2w_target.h5
- vds.h5
- vds-exc.h5
- vds-excalibur.h5
- vds-exclim.h5
- vds-percival.h5
- vds-percival-unlim.h5
- vds-percival-unlim-maxmin.h5
- a.h5
- b.h5
- c.h5
- d.h5
- vds-simpleIO.h5
- vds-eiger.h5
)
if (HDF5_TEST_SERIAL)
@@ -110,31 +95,3 @@ if (HDF5_TEST_SERIAL)
set (last_test "EXAMPLES-${example}")
endforeach ()
endif ()
-
-### Windows pops up a modal permission dialog on this test
-if (H5_HAVE_PARALLEL AND HDF5_TEST_PARALLEL AND NOT WIN32)
- # Ensure that 24 is a multiple of the number of processes.
- # The number 24 corresponds to SPACE1_DIM1 and SPACE1_DIM2 defined in ph5example.c
- math(EXPR NUMPROCS "24 / ((24 + ${MPIEXEC_MAX_NUMPROCS} - 1) / ${MPIEXEC_MAX_NUMPROCS})")
-
- foreach (parallel_example ${parallel_examples})
- if (HDF5_ENABLE_USING_MEMCHECKER)
- add_test (NAME MPI_TEST_EXAMPLES-${parallel_example} COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} ${NUMPROCS} ${MPIEXEC_PREFLAGS} $<TARGET_FILE:${parallel_example}> ${MPIEXEC_POSTFLAGS})
- else ()
- add_test (NAME MPI_TEST_EXAMPLES-${parallel_example} COMMAND "${CMAKE_COMMAND}"
- -D "TEST_PROGRAM=${MPIEXEC_EXECUTABLE}"
- -D "TEST_ARGS:STRING=${MPIEXEC_NUMPROC_FLAG};${NUMPROCS};${MPIEXEC_PREFLAGS};$<TARGET_FILE:${parallel_example}>;${MPIEXEC_POSTFLAGS}"
- -D "TEST_EXPECT=0"
- -D "TEST_SKIP_COMPARE=TRUE"
- -D "TEST_OUTPUT=${parallel_example}.out"
- -D "TEST_REFERENCE:STRING=PHDF5 example finished with no errors"
- -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
- -P "${HDF_RESOURCES_DIR}/grepTest.cmake"
- )
- endif ()
- if (last_test)
- set_tests_properties (MPI_TEST_EXAMPLES-${parallel_example} PROPERTIES DEPENDS ${last_test})
- endif ()
- set (last_test "MPI_TEST_EXAMPLES-${parallel_example}")
- endforeach ()
-endif ()
diff --git a/examples/Makefile.am b/examples/Makefile.am
index a09042c..508664b 100644
--- a/examples/Makefile.am
+++ b/examples/Makefile.am
@@ -18,15 +18,6 @@
include $(top_srcdir)/config/commence.am
-if BUILD_PARALLEL_CONDITIONAL
- EXAMPLE_PROG_PARA = ph5example ph5_filtered_writes ph5_filtered_writes_no_sel
-
-if SUBFILING_VFD_CONDITIONAL
- EXAMPLE_PROG_PARA += ph5_subfiling
-endif
-
-endif
-
INSTALL_SCRIPT_FILES = run-c-ex.sh
INSTALL_TOP_SCRIPT_FILES = run-all-ex.sh
INSTALL_TOP_FILES = README
@@ -40,9 +31,7 @@ EXAMPLE_PROG = h5_write h5_read h5_extend_write h5_chunk_read h5_compound \
h5_crtatt h5_crtgrp h5_crtdat \
h5_group h5_select h5_attribute h5_mount h5_drivers \
h5_reference_deprec h5_ref_extern h5_ref_compat h5_ref2reg_deprec \
- h5_extlink h5_elink_unix2win h5_shared_mesg h5_debug_trace \
- h5_vds h5_vds-exc h5_vds-exclim h5_vds-eiger h5_vds-simpleIO \
- h5_vds-percival h5_vds-percival-unlim h5_vds-percival-unlim-maxmin
+ h5_extlink h5_elink_unix2win h5_shared_mesg h5_debug_trace
TEST_SCRIPT=testh5cc.sh
TEST_EXAMPLES_SCRIPT=$(INSTALL_SCRIPT_FILES)
@@ -53,13 +42,7 @@ INSTALL_FILES = h5_write.c h5_read.c h5_extend_write.c h5_chunk_read.c h5_compou
h5_crtatt.c h5_crtgrp.c h5_crtdat.c \
h5_group.c h5_select.c h5_attribute.c h5_mount.c h5_drivers.c \
h5_reference_deprec.c h5_ref_extern.c h5_ref_compat.c h5_ref2reg_deprec.c \
- h5_extlink.c h5_elink_unix2win.c h5_shared_mesg.c h5_debug_trace.c \
- ph5example.c ph5_filtered_writes.c ph5_filtered_writes_no_sel.c \
- ph5_subfiling.c h5_vds.c h5_vds-exc.c h5_vds-exclim.c h5_vds-eiger.c \
- h5_vds-simpleIO.c h5_vds-percival.c h5_vds-percival-unlim.c \
- h5_vds-percival-unlim-maxmin.c
-
-
+ h5_extlink.c h5_elink_unix2win.c h5_shared_mesg.c h5_debug_trace.c
# How to build examples, using installed version of h5cc
if BUILD_PARALLEL_CONDITIONAL
@@ -123,22 +106,10 @@ h5_ref_extern: $(srcdir)/h5_ref_extern.c
h5_reference_deprec: $(srcdir)/h5_reference_deprec.c
h5_ref2reg_deprec: $(srcdir)/h5_ref2reg_deprec.c
h5_drivers: $(srcdir)/h5_drivers.c
-ph5example: $(srcdir)/ph5example.c
-ph5_filtered_writes: $(srcdir)/ph5_filtered_writes.c
-ph5_filtered_writes_no_sel: $(srcdir)/ph5_filtered_writes_no_sel.c
-ph5_subfiling: $(srcdir)/ph5_subfiling.c
h5_dtransform: $(srcdir)/h5_dtransform.c
h5_extlink: $(srcdir)/h5_extlink.c $(EXTLINK_DIRS)
h5_elink_unix2win: $(srcdir)/h5_elink_unix2win.c $(EXTLINK_DIRS)
h5_shared_mesg: $(srcdir)/h5_shared_mesg.c
-h5_vds: $(srcdir)/h5_vds.c
-h5_vds-exc: $(srcdir)/h5_vds-exc.c
-h5_vds-exclim: $(srcdir)/h5_vds-exclim.c
-h5_vds-eiger: $(srcdir)/h5_vds-eiger.c
-h5_vds-simpleIO: $(srcdir)/h5_vds-simpleIO.c
-h5_vds-percival: $(srcdir)/h5_vds-percival.c
-h5_vds-percival-unlim: $(srcdir)/h5_vds-percival-unlim.c
-h5_vds-percival-unlim-maxmin: $(srcdir)/h5_vds-percival-unlim-maxmin.c
if BUILD_SHARED_SZIP_CONDITIONAL
LD_LIBRARY_PATH=$(LL_PATH)
diff --git a/examples/h5_vds-eiger.c b/examples/h5_vds-eiger.c
deleted file mode 100644
index fcde490..0000000
--- a/examples/h5_vds-eiger.c
+++ /dev/null
@@ -1,179 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group. *
- * All rights reserved. *
- * *
- * This file is part of HDF5. The full HDF5 copyright notice, including *
- * terms governing use, modification, and redistribution, is contained in *
- * the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://www.hdfgroup.org/licenses. *
- * If you do not have access to either file, you may request a copy from *
- * help@hdfgroup.org. *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/************************************************************
-
- This example illustrates the concept of the virtual dataset.
- Eiger use case. Every 5 frames 10x10 are in the source
- dataset "/A" in file with the name f-<#>.h5
- This file is intended for use with HDF5 Library version 1.10
-
- ************************************************************/
-/* EIP Add link to the picture */
-
-#include "hdf5.h"
-#include <stdio.h>
-#include <stdlib.h>
-
-#define FILE "vds-eiger.h5"
-#define DATASET "VDS-Eiger"
-#define VDSDIM0 5
-#define VDSDIM1 10
-#define VDSDIM2 10
-#define DIM0 5
-#define DIM1 10
-#define DIM2 10
-#define RANK 3
-
-int
-main(void)
-{
- hid_t file, src_space, vspace, dset; /* Handles */
- hid_t dcpl;
- herr_t status;
- hsize_t vdsdims[3] = {VDSDIM0, VDSDIM1, VDSDIM2}, vdsdims_max[3] = {H5S_UNLIMITED, VDSDIM1, VDSDIM1},
- dims[3] = {DIM0, DIM1, DIM2}, start[3], /* Hyperslab parameters */
- stride[3], count[3], block[3];
- hsize_t start_out[3], /* Hyperslab parameter out */
- stride_out[3], count_out[3], block_out[3];
- int i;
- H5D_layout_t layout; /* Storage layout */
- size_t num_map; /* Number of mappings */
- ssize_t len; /* Length of the string; also a return value */
- char *filename;
- char *dsetname;
-
- file = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
-
- /* Create VDS dataspace. */
- vspace = H5Screate_simple(RANK, vdsdims, vdsdims_max);
-
- /* Create dataspaces for the source dataset. */
- src_space = H5Screate_simple(RANK, dims, NULL);
-
- /* Create VDS creation property */
- dcpl = H5Pcreate(H5P_DATASET_CREATE);
-
- /* Initialize hyperslab values */
-
- start[0] = 0;
- start[1] = 0;
- start[2] = 0;
- stride[0] = DIM0;
- stride[1] = 1;
- stride[2] = 1;
- count[0] = H5S_UNLIMITED;
- count[1] = 1;
- count[2] = 1;
- block[0] = DIM0;
- block[1] = DIM1;
- block[2] = DIM2;
-
- /*
- * Build the mappings
- *
- */
- status = H5Sselect_hyperslab(vspace, H5S_SELECT_SET, start, stride, count, block);
- status = H5Pset_virtual(dcpl, vspace, "f-%b.h5", "/A", src_space);
-
- /* Create a virtual dataset */
- dset = H5Dcreate2(file, DATASET, H5T_NATIVE_INT, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
- status = H5Sclose(vspace);
- status = H5Sclose(src_space);
- status = H5Dclose(dset);
- status = H5Fclose(file);
-
- /*
- * Now we begin the read section of this example.
- */
-
- /*
- * Open file and dataset using the default properties.
- */
- file = H5Fopen(FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
- dset = H5Dopen2(file, DATASET, H5P_DEFAULT);
-
- /*
- * Get creation property list and mapping properties.
- */
- dcpl = H5Dget_create_plist(dset);
-
- /*
- * Get storage layout.
- */
- layout = H5Pget_layout(dcpl);
-
- if (H5D_VIRTUAL == layout)
- printf(" Dataset has a virtual layout \n");
- else
- printf(" Wrong layout found \n");
-
- /*
- * Find number of mappings.
- */
- status = H5Pget_virtual_count(dcpl, &num_map);
- printf(" Number of mappings is %d\n", (int)num_map);
-
- /*
- * Get mapping parameters for each mapping.
- */
- for (i = 0; i < (int)num_map; i++) {
- printf(" Mapping %d \n", i);
- printf(" Selection in the virtual dataset \n");
- /* Get selection in the virtual dataset */
- vspace = H5Pget_virtual_vspace(dcpl, (size_t)i);
- if (H5Sget_select_type(vspace) == H5S_SEL_HYPERSLABS) {
- if (H5Sis_regular_hyperslab(vspace)) {
- status = H5Sget_regular_hyperslab(vspace, start_out, stride_out, count_out, block_out);
- printf(" start = [%llu, %llu, %llu] \n", (unsigned long long)start_out[0],
- (unsigned long long)start_out[1], (unsigned long long)start_out[2]);
- printf(" stride = [%llu, %llu, %llu] \n", (unsigned long long)stride_out[0],
- (unsigned long long)stride_out[1], (unsigned long long)stride_out[2]);
- printf(" count = [%llu, %llu, %llu] \n", (unsigned long long)count_out[0],
- (unsigned long long)count_out[1], (unsigned long long)count_out[2]);
- printf(" block = [%llu, %llu, %llu] \n", (unsigned long long)block_out[0],
- (unsigned long long)block_out[1], (unsigned long long)block_out[2]);
- }
- }
- /* Get source file name */
- len = H5Pget_virtual_filename(dcpl, (size_t)i, NULL, 0);
- filename = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_filename(dcpl, (size_t)i, filename, len + 1);
- printf(" Source filename %s\n", filename);
-
- /* Get source dataset name */
- len = H5Pget_virtual_dsetname(dcpl, (size_t)i, NULL, 0);
- dsetname = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_dsetname(dcpl, (size_t)i, dsetname, len + 1);
- printf(" Source dataset name %s\n", dsetname);
-
- /* Get selection in the source dataset */
- printf(" Selection in the source dataset ");
- src_space = H5Pget_virtual_srcspace(dcpl, (size_t)i);
- if (H5Sget_select_type(src_space) == H5S_SEL_ALL) {
- printf("H5S_ALL \n");
- }
- /* EIP read data back */
- H5Sclose(vspace);
- H5Sclose(src_space);
- free(filename);
- free(dsetname);
- }
-
- /*
- * Close and release resources.
- */
- status = H5Pclose(dcpl);
- status = H5Dclose(dset);
- status = H5Fclose(file);
-
- return 0;
-}
diff --git a/examples/h5_vds-exc.c b/examples/h5_vds-exc.c
deleted file mode 100644
index 01597cc..0000000
--- a/examples/h5_vds-exc.c
+++ /dev/null
@@ -1,217 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group. *
- * All rights reserved. *
- * *
- * This file is part of HDF5. The full HDF5 copyright notice, including *
- * terms governing use, modification, and redistribution, is contained in *
- * the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://www.hdfgroup.org/licenses. *
- * If you do not have access to either file, you may request a copy from *
- * help@hdfgroup.org. *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/************************************************************
-
- This example illustrates the concept of the virtual dataset.
- Excalibur use case with k=2 and m=3.
- This file is intended for use with HDF5 Library version 1.10
-
- ************************************************************/
-/* EIP Add link to the picture */
-
-#include "hdf5.h"
-#include <stdio.h>
-#include <stdlib.h>
-
-#define FILE "vds-exc.h5"
-#define DATASET "VDS-Excalibur"
-#define VDSDIM0 0
-#define VDSDIM1 15
-#define VDSDIM2 6
-#define KDIM0 0
-#define KDIM1 2
-#define KDIM2 6
-#define NDIM0 0
-#define NDIM1 3
-#define NDIM2 6
-#define RANK 3
-
-const char *SRC_FILE[] = {"a.h5", "b.h5", "c.h5", "d.h5", "e.h5", "f.h5"};
-
-const char *SRC_DATASET[] = {"A", "B", "C", "D", "E", "F"};
-
-int
-main(void)
-{
- hid_t file, space, ksrc_space, nsrc_space, vspace, src_space, dset; /* Handles */
- hid_t dcpl;
- herr_t status;
- hsize_t vdsdims[3] = {VDSDIM0, VDSDIM1, VDSDIM2}, vdsdims_max[3] = {H5S_UNLIMITED, VDSDIM1, VDSDIM2},
- kdims[3] = {KDIM0, KDIM1, KDIM2}, kdims_max[3] = {H5S_UNLIMITED, KDIM1, KDIM2},
- ndims[3] = {NDIM0, NDIM1, NDIM2}, ndims_max[3] = {H5S_UNLIMITED, NDIM1, NDIM2},
- start[3], /* Hyperslab parameters */
- count[3], block[3];
- hsize_t start_out[3], stride_out[3], count_out[3], block_out[3];
- int k = 2;
- int n = 3;
- int i;
- H5D_layout_t layout; /* Storage layout */
- size_t num_map; /* Number of mappings */
- ssize_t len; /* Length of the string; also a return value */
- char *filename;
- char *dsetname;
-
- file = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
-
- /* Create VDS dataspace. */
- space = H5Screate_simple(RANK, vdsdims, vdsdims_max);
- /* Create dataspaces for A, C, and E datasets. */
- ksrc_space = H5Screate_simple(RANK, kdims, kdims_max);
- /* Create dataspaces for B, D, and F datasets. */
- nsrc_space = H5Screate_simple(RANK, ndims, ndims_max);
-
- /* Create VDS creation property */
- dcpl = H5Pcreate(H5P_DATASET_CREATE);
-
- /* Initialize hyperslab values */
-
- start[0] = 0;
- start[1] = 0;
- start[2] = 0;
- count[0] = H5S_UNLIMITED;
- count[1] = 1;
- count[2] = 1;
- block[0] = 1;
- block[1] = k;
- block[2] = VDSDIM2;
-
- /*
- * Build the mappings for A, C and E source datasets.
- * Unlimited hyperslab selection is the same in the source datasets.
- * Unlimited hyperslab selections in the virtual dataset have different offsets.
- */
- status = H5Sselect_hyperslab(ksrc_space, H5S_SELECT_SET, start, NULL, count, block);
- for (i = 0; i < 3; i++) {
- start[1] = (hsize_t)((k + n) * i);
- status = H5Sselect_hyperslab(space, H5S_SELECT_SET, start, NULL, count, block);
- status = H5Pset_virtual(dcpl, space, SRC_FILE[2 * i], SRC_DATASET[2 * i], ksrc_space);
- }
-
- /* Reinitialize start[1] and block[1] to build the second set of mappings. */
- start[1] = 0;
- block[1] = n;
- /*
- * Build the mappings for B, D and F source datasets.
- * Unlimited hyperslab selection is the same in the source datasets.
- * Unlimited hyperslab selections in the virtual dataset have different offsets.
- */
- status = H5Sselect_hyperslab(nsrc_space, H5S_SELECT_SET, start, NULL, count, block);
- for (i = 0; i < 3; i++) {
- start[1] = (hsize_t)(k + (k + n) * i);
- status = H5Sselect_hyperslab(space, H5S_SELECT_SET, start, NULL, count, block);
- status = H5Pset_virtual(dcpl, space, SRC_FILE[2 * i + 1], SRC_DATASET[2 * i + 1], nsrc_space);
- }
-
- /* Create a virtual dataset */
- dset = H5Dcreate2(file, DATASET, H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT);
- status = H5Sclose(space);
- status = H5Sclose(nsrc_space);
- status = H5Sclose(ksrc_space);
- status = H5Dclose(dset);
- status = H5Fclose(file);
-
- /*
- * Now we begin the read section of this example.
- */
-
- /*
- * Open file and dataset using the default properties.
- */
- file = H5Fopen(FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
- dset = H5Dopen2(file, DATASET, H5P_DEFAULT);
-
- /*
- * Get creation property list and mapping properties.
- */
- dcpl = H5Dget_create_plist(dset);
-
- /*
- * Get storage layout.
- */
- layout = H5Pget_layout(dcpl);
-
- if (H5D_VIRTUAL == layout)
- printf(" Dataset has a virtual layout \n");
- else
- printf("Wrong layout found \n");
-
- /*
- * Find number of mappings.
- */
- status = H5Pget_virtual_count(dcpl, &num_map);
- printf(" Number of mappings is %lu\n", (unsigned long)num_map);
-
- /*
- * Get mapping parameters for each mapping.
- */
- for (i = 0; i < (int)num_map; i++) {
- printf(" Mapping %d \n", i);
- printf(" Selection in the virtual dataset \n");
- /* Get selection in the virtual dataset */
- vspace = H5Pget_virtual_vspace(dcpl, (size_t)i);
- if (H5Sget_select_type(vspace) == H5S_SEL_HYPERSLABS) {
- if (H5Sis_regular_hyperslab(vspace)) {
- status = H5Sget_regular_hyperslab(vspace, start_out, stride_out, count_out, block_out);
- printf(" start = [%llu, %llu, %llu] \n", (unsigned long long)start_out[0],
- (unsigned long long)start_out[1], (unsigned long long)start_out[2]);
- printf(" stride = [%llu, %llu, %llu] \n", (unsigned long long)stride_out[0],
- (unsigned long long)stride_out[1], (unsigned long long)stride_out[2]);
- printf(" count = [%llu, %llu, %llu] \n", (unsigned long long)count_out[0],
- (unsigned long long)count_out[1], (unsigned long long)count_out[2]);
- printf(" block = [%llu, %llu, %llu] \n", (unsigned long long)block_out[0],
- (unsigned long long)block_out[1], (unsigned long long)block_out[2]);
- }
- }
- /* Get source file name */
- len = H5Pget_virtual_filename(dcpl, (size_t)i, NULL, 0);
- filename = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_filename(dcpl, (size_t)i, filename, len + 1);
- printf(" Source filename %s\n", filename);
-
- /* Get source dataset name */
- len = H5Pget_virtual_dsetname(dcpl, (size_t)i, NULL, 0);
- dsetname = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_dsetname(dcpl, (size_t)i, dsetname, len + 1);
- printf(" Source dataset name %s\n", dsetname);
-
- /* Get selection in the source dataset */
- printf(" Selection in the source dataset \n");
- src_space = H5Pget_virtual_srcspace(dcpl, (size_t)i);
- if (H5Sget_select_type(src_space) == H5S_SEL_HYPERSLABS) {
- if (H5Sis_regular_hyperslab(vspace)) {
- status = H5Sget_regular_hyperslab(src_space, start_out, stride_out, count_out, block_out);
- printf(" start = [%llu, %llu, %llu] \n", (unsigned long long)start_out[0],
- (unsigned long long)start_out[1], (unsigned long long)start_out[2]);
- printf(" stride = [%llu, %llu, %llu] \n", (unsigned long long)stride_out[0],
- (unsigned long long)stride_out[1], (unsigned long long)stride_out[2]);
- printf(" count = [%llu, %llu, %llu] \n", (unsigned long long)count_out[0],
- (unsigned long long)count_out[1], (unsigned long long)count_out[2]);
- printf(" block = [%llu, %llu, %llu] \n", (unsigned long long)block_out[0],
- (unsigned long long)block_out[1], (unsigned long long)block_out[2]);
- }
- }
- H5Sclose(vspace);
- H5Sclose(src_space);
- free(filename);
- free(dsetname);
- }
- /* EIP read data back */
-
- /*
- * Close and release resources.
- */
- status = H5Pclose(dcpl);
- status = H5Dclose(dset);
- status = H5Fclose(file);
-
- return 0;
-}
diff --git a/examples/h5_vds-exclim.c b/examples/h5_vds-exclim.c
deleted file mode 100644
index 4fb5536..0000000
--- a/examples/h5_vds-exclim.c
+++ /dev/null
@@ -1,214 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group. *
- * All rights reserved. *
- * *
- * This file is part of HDF5. The full HDF5 copyright notice, including *
- * terms governing use, modification, and redistribution, is contained in *
- * the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://www.hdfgroup.org/licenses. *
- * If you do not have access to either file, you may request a copy from *
- * help@hdfgroup.org. *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/************************************************************
-
- This example illustrates the concept of the virtual dataset.
- Excalibur use case with k=2 and m=3 and only 3 planes in
- Z-direction (i.e., not unlimited).
- This file is intended for use with HDF5 Library version 1.10
-
- ************************************************************/
-/* EIP Add link to the picture */
-
-#include "hdf5.h"
-#include <stdio.h>
-#include <stdlib.h>
-
-#define FILE "vds-exclim.h5"
-#define DATASET "VDS-Excaliburlim"
-#define VDSDIM0 3
-#define VDSDIM1 15
-#define VDSDIM2 6
-#define KDIM0 3
-#define KDIM1 2
-#define KDIM2 6
-#define NDIM0 3
-#define NDIM1 3
-#define NDIM2 6
-#define RANK 3
-
-const char *SRC_FILE[] = {"a.h5", "b.h5", "c.h5", "d.h5", "e.h5", "f.h5"};
-
-const char *SRC_DATASET[] = {"A", "B", "C", "D", "E", "F"};
-
-int
-main(void)
-{
- hid_t file, space, ksrc_space, nsrc_space, vspace, src_space, dset; /* Handles */
- hid_t dcpl;
- herr_t status;
- hsize_t vdsdims[3] = {VDSDIM0, VDSDIM1, VDSDIM2}, kdims[3] = {KDIM0, KDIM1, KDIM2},
- ndims[3] = {NDIM0, NDIM1, NDIM2}, start[3], /* Hyperslab parameters */
- count[3], block[3];
- hsize_t start_out[3], stride_out[3], count_out[3], block_out[3];
- int k = 2;
- int n = 3;
- int i;
- H5D_layout_t layout; /* Storage layout */
- size_t num_map; /* Number of mappings */
- ssize_t len; /* Length of the string; also a return value */
- char *filename;
- char *dsetname;
-
- file = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
-
- /* Create VDS dataspace. */
- space = H5Screate_simple(RANK, vdsdims, NULL);
- /* Create dataspaces for A, C, and E datasets. */
- ksrc_space = H5Screate_simple(RANK, kdims, NULL);
- /* Create dataspaces for B, D, and F datasets. */
- nsrc_space = H5Screate_simple(RANK, ndims, NULL);
-
- /* Create VDS creation property */
- dcpl = H5Pcreate(H5P_DATASET_CREATE);
-
- /* Initialize hyperslab values */
-
- start[0] = 0;
- start[1] = 0;
- start[2] = 0;
- count[0] = VDSDIM0;
- count[1] = 1;
- count[2] = 1;
- block[0] = 1;
- block[1] = k;
- block[2] = VDSDIM2;
-
- /*
- * Build the mappings for A, C and E source datasets.
- *
- */
- status = H5Sselect_hyperslab(ksrc_space, H5S_SELECT_SET, start, NULL, count, block);
- for (i = 0; i < 3; i++) {
- start[1] = (hsize_t)((k + n) * i);
- status = H5Sselect_hyperslab(space, H5S_SELECT_SET, start, NULL, count, block);
- status = H5Pset_virtual(dcpl, space, SRC_FILE[2 * i], SRC_DATASET[2 * i], ksrc_space);
- }
-
- /* Reinitialize start[0] and block[1] */
- start[0] = 0;
- block[1] = n;
- /*
- * Build the mappings for B, D and F source datasets.
- *
- */
- status = H5Sselect_hyperslab(nsrc_space, H5S_SELECT_SET, start, NULL, count, block);
- for (i = 0; i < 3; i++) {
- start[1] = (hsize_t)(k + (k + n) * i);
- status = H5Sselect_hyperslab(space, H5S_SELECT_SET, start, NULL, count, block);
- status = H5Pset_virtual(dcpl, space, SRC_FILE[2 * i + 1], SRC_DATASET[2 * i + 1], nsrc_space);
- }
-
- /* Create a virtual dataset */
- dset = H5Dcreate2(file, DATASET, H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT);
- status = H5Sclose(space);
- status = H5Sclose(nsrc_space);
- status = H5Sclose(ksrc_space);
- status = H5Dclose(dset);
- status = H5Fclose(file);
-
- /*
- * Now we begin the read section of this example.
- */
-
- /*
- * Open file and dataset using the default properties.
- */
- file = H5Fopen(FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
- dset = H5Dopen2(file, DATASET, H5P_DEFAULT);
-
- /*
- * Get creation property list and mapping properties.
- */
- dcpl = H5Dget_create_plist(dset);
-
- /*
- * Get storage layout.
- */
- layout = H5Pget_layout(dcpl);
-
- if (H5D_VIRTUAL == layout)
- printf(" Dataset has a virtual layout \n");
- else
- printf("Wrong layout found \n");
-
- /*
- * Find number of mappings.
- */
- status = H5Pget_virtual_count(dcpl, &num_map);
- printf(" Number of mappings is %lu\n", (unsigned long)num_map);
-
- /*
- * Get mapping parameters for each mapping.
- */
- for (i = 0; i < (int)num_map; i++) {
- printf(" Mapping %d \n", i);
- printf(" Selection in the virtual dataset \n");
- /* Get selection in the virtual dataset */
- vspace = H5Pget_virtual_vspace(dcpl, (size_t)i);
- if (H5Sget_select_type(vspace) == H5S_SEL_HYPERSLABS) {
- if (H5Sis_regular_hyperslab(vspace)) {
- status = H5Sget_regular_hyperslab(vspace, start_out, stride_out, count_out, block_out);
- printf(" start = [%llu, %llu, %llu] \n", (unsigned long long)start_out[0],
- (unsigned long long)start_out[1], (unsigned long long)start_out[2]);
- printf(" stride = [%llu, %llu, %llu] \n", (unsigned long long)stride_out[0],
- (unsigned long long)stride_out[1], (unsigned long long)stride_out[2]);
- printf(" count = [%llu, %llu, %llu] \n", (unsigned long long)count_out[0],
- (unsigned long long)count_out[1], (unsigned long long)count_out[2]);
- printf(" block = [%llu, %llu, %llu] \n", (unsigned long long)block_out[0],
- (unsigned long long)block_out[1], (unsigned long long)block_out[2]);
- }
- }
- /* Get source file name */
- len = H5Pget_virtual_filename(dcpl, (size_t)i, NULL, 0);
- filename = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_filename(dcpl, (size_t)i, filename, len + 1);
- printf(" Source filename %s\n", filename);
-
- /* Get source dataset name */
- len = H5Pget_virtual_dsetname(dcpl, (size_t)i, NULL, 0);
- dsetname = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_dsetname(dcpl, (size_t)i, dsetname, len + 1);
- printf(" Source dataset name %s\n", dsetname);
-
- /* Get selection in the source dataset */
- printf(" Selection in the source dataset \n");
- src_space = H5Pget_virtual_srcspace(dcpl, (size_t)i);
- if (H5Sget_select_type(src_space) == H5S_SEL_HYPERSLABS) {
- if (H5Sis_regular_hyperslab(vspace)) {
- status = H5Sget_regular_hyperslab(vspace, start_out, stride_out, count_out, block_out);
- printf(" start = [%d, %d, %d] \n", (int)start_out[0], (int)start_out[1],
- (int)start_out[2]);
- printf(" stride = [%d, %d, %d] \n", (int)stride_out[0], (int)stride_out[1],
- (int)stride_out[2]);
- printf(" count = [%d, %d, %d] \n", (int)count_out[0], (int)count_out[1],
- (int)count_out[2]);
- printf(" block = [%d, %d, %d] \n", (int)block_out[0], (int)block_out[1],
- (int)block_out[2]);
- }
- }
- H5Sclose(vspace);
- H5Sclose(src_space);
- free(filename);
- free(dsetname);
- }
- /* EIP read data back */
-
- /*
- * Close and release resources.
- */
- status = H5Pclose(dcpl);
- status = H5Dclose(dset);
- status = H5Fclose(file);
-
- return 0;
-}
diff --git a/examples/h5_vds-percival-unlim-maxmin.c b/examples/h5_vds-percival-unlim-maxmin.c
deleted file mode 100644
index 9ef514d..0000000
--- a/examples/h5_vds-percival-unlim-maxmin.c
+++ /dev/null
@@ -1,304 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group. *
- * All rights reserved. *
- * *
- * This file is part of HDF5. The full HDF5 copyright notice, including *
- * terms governing use, modification, and redistribution, is contained in *
- * the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://www.hdfgroup.org/licenses. *
- * If you do not have access to either file, you may request a copy from *
- * help@hdfgroup.org. *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/************************************************************
-
- This example illustrates the concept of the virtual dataset.
- Percival use case. Every fifth 10x10 plane in VDS is stored in
- the corresponding 3D unlimited dataset.
- There are 4 source datasets total.
- Each of the source datasets is extended to different sizes.
- VDS access property can be used to get max and min extent.
- This file is intended for use with HDF5 Library version 1.10
-
- ************************************************************/
-/* EIP Add link to the picture */
-
-#include "hdf5.h"
-#include <stdio.h>
-#include <stdlib.h>
-
-#define VFILE "vds-percival-unlim-maxmin.h5"
-#define DATASET "VDS-Percival-unlim-maxmin"
-#define VDSDIM0 H5S_UNLIMITED
-#define VDSDIM1 10
-#define VDSDIM2 10
-
-#define DIM0 H5S_UNLIMITED
-#define DIM0_1 4 /* Initial size of the source datasets */
-#define DIM1 10
-#define DIM2 10
-#define RANK 3
-#define PLANE_STRIDE 4
-
-const char *SRC_FILE[] = {"a.h5", "b.h5", "c.h5", "d.h5"};
-
-const char *SRC_DATASET[] = {"A", "B", "C", "D"};
-
-int
-main(void)
-{
- hid_t vfile, file, src_space, mem_space, vspace, vdset, dset; /* Handles */
- hid_t dcpl, dapl;
- herr_t status;
- hsize_t vdsdims[3] = {4 * DIM0_1, VDSDIM1, VDSDIM2}, vdsdims_max[3] = {VDSDIM0, VDSDIM1, VDSDIM2},
- dims[3] = {DIM0_1, DIM1, DIM2}, memdims[3] = {DIM0_1, DIM1, DIM2},
- extdims[3] = {0, DIM1, DIM2}, /* Dimensions of the extended source datasets */
- chunk_dims[3] = {DIM0_1, DIM1, DIM2}, dims_max[3] = {DIM0, DIM1, DIM2}, vdsdims_out[3],
- vdsdims_max_out[3], start[3], /* Hyperslab parameters */
- stride[3], count[3], src_count[3], block[3];
- hsize_t start_out[3], /* Hyperslab parameter out */
- stride_out[3], count_out[3], block_out[3];
- int i, j;
- H5D_layout_t layout; /* Storage layout */
- size_t num_map; /* Number of mappings */
- ssize_t len; /* Length of the string; also a return value */
- char *filename;
- char *dsetname;
- int wdata[DIM0_1 * DIM1 * DIM2];
-
- /*
- * Create source files and datasets. This step is optional.
- */
- for (i = 0; i < PLANE_STRIDE; i++) {
- /*
- * Initialize data for i-th source dataset.
- */
- for (j = 0; j < DIM0_1 * DIM1 * DIM2; j++)
- wdata[j] = i + 1;
-
- /*
- * Create the source files and datasets. Write data to each dataset and
- * close all resources.
- */
-
- file = H5Fcreate(SRC_FILE[i], H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
- src_space = H5Screate_simple(RANK, dims, dims_max);
- dcpl = H5Pcreate(H5P_DATASET_CREATE);
- status = H5Pset_chunk(dcpl, RANK, chunk_dims);
- dset = H5Dcreate2(file, SRC_DATASET[i], H5T_NATIVE_INT, src_space, H5P_DEFAULT, dcpl, H5P_DEFAULT);
- status = H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
- status = H5Sclose(src_space);
- status = H5Pclose(dcpl);
- status = H5Dclose(dset);
- status = H5Fclose(file);
- }
-
- vfile = H5Fcreate(VFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
-
- /* Create VDS dataspace. */
- vspace = H5Screate_simple(RANK, vdsdims, vdsdims_max);
-
- /* Create dataspaces for the source dataset. */
- src_space = H5Screate_simple(RANK, dims, dims_max);
-
- /* Create VDS creation property */
- dcpl = H5Pcreate(H5P_DATASET_CREATE);
-
- /* Initialize hyperslab values */
-
- start[0] = 0;
- start[1] = 0;
- start[2] = 0;
- stride[0] = PLANE_STRIDE; /* we will select every fifth plane in VDS */
- stride[1] = 1;
- stride[2] = 1;
- count[0] = H5S_UNLIMITED;
- count[1] = 1;
- count[2] = 1;
- src_count[0] = H5S_UNLIMITED;
- src_count[1] = 1;
- src_count[2] = 1;
- block[0] = 1;
- block[1] = DIM1;
- block[2] = DIM2;
-
- /*
- * Build the mappings
- *
- */
- status = H5Sselect_hyperslab(src_space, H5S_SELECT_SET, start, NULL, src_count, block);
- for (i = 0; i < PLANE_STRIDE; i++) {
- status = H5Sselect_hyperslab(vspace, H5S_SELECT_SET, start, stride, count, block);
- status = H5Pset_virtual(dcpl, vspace, SRC_FILE[i], SRC_DATASET[i], src_space);
- start[0]++;
- }
-
- H5Sselect_none(vspace);
-
- /* Create a virtual dataset */
- vdset = H5Dcreate2(vfile, DATASET, H5T_NATIVE_INT, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
- status = H5Sclose(vspace);
- status = H5Sclose(src_space);
- status = H5Pclose(dcpl);
-
- /* Let's add data to the source datasets and check new dimensions for VDS */
- /* We will add only one plane to the first source dataset, two planes to the
- second one, three to the third, and four to the forth. */
-
- for (i = 0; i < PLANE_STRIDE; i++) {
- /*
- * Initialize data for i-th source dataset.
- */
- for (j = 0; j < (i + 1) * DIM1 * DIM2; j++)
- wdata[j] = 10 * (i + 1);
-
- /*
- * Open the source files and datasets. Append data to each dataset and
- * close all resources.
- */
-
- file = H5Fopen(SRC_FILE[i], H5F_ACC_RDWR, H5P_DEFAULT);
- dset = H5Dopen2(file, SRC_DATASET[i], H5P_DEFAULT);
- extdims[0] = DIM0_1 + i + 1;
- status = H5Dset_extent(dset, extdims);
- src_space = H5Dget_space(dset);
- start[0] = DIM0_1;
- start[1] = 0;
- start[2] = 0;
- count[0] = 1;
- count[1] = 1;
- count[2] = 1;
- block[0] = i + 1;
- block[1] = DIM1;
- block[2] = DIM2;
-
- memdims[0] = i + 1;
- mem_space = H5Screate_simple(RANK, memdims, NULL);
- status = H5Sselect_hyperslab(src_space, H5S_SELECT_SET, start, NULL, count, block);
- status = H5Dwrite(dset, H5T_NATIVE_INT, mem_space, src_space, H5P_DEFAULT, wdata);
- status = H5Sclose(src_space);
- status = H5Dclose(dset);
- status = H5Fclose(file);
- }
-
- status = H5Dclose(vdset);
- status = H5Fclose(vfile);
-
- /*
- * Now we begin the read section of this example.
- */
-
- /*
- * Open file and dataset using the default properties.
- */
- vfile = H5Fopen(VFILE, H5F_ACC_RDONLY, H5P_DEFAULT);
-
- /*
- * Open VDS using different access properties to use max or
- * min extents depending on the sizes of the underlying datasets
- */
- dapl = H5Pcreate(H5P_DATASET_ACCESS);
-
- for (i = 0; i < 2; i++) {
- status = H5Pset_virtual_view(dapl, i ? H5D_VDS_LAST_AVAILABLE : H5D_VDS_FIRST_MISSING);
- vdset = H5Dopen2(vfile, DATASET, dapl);
-
- /* Let's get space of the VDS and its dimension; we should get 32(or 20)x10x10 */
- vspace = H5Dget_space(vdset);
- H5Sget_simple_extent_dims(vspace, vdsdims_out, vdsdims_max_out);
- printf("VDS dimensions, bounds = H5D_VDS_%s: ", i ? "LAST_AVAILABLE" : "FIRST_MISSING");
- for (j = 0; j < RANK; j++)
- printf(" %d ", (int)vdsdims_out[j]);
- printf("\n");
-
- /* Close */
- status = H5Dclose(vdset);
- status = H5Sclose(vspace);
- }
-
- status = H5Pclose(dapl);
-
- vdset = H5Dopen2(vfile, DATASET, H5P_DEFAULT);
-
- /*
- * Get creation property list and mapping properties.
- */
- dcpl = H5Dget_create_plist(vdset);
-
- /*
- * Get storage layout.
- */
- layout = H5Pget_layout(dcpl);
-
- if (H5D_VIRTUAL == layout)
- printf(" Dataset has a virtual layout \n");
- else
- printf(" Wrong layout found \n");
-
- /*
- * Find number of mappings.
- */
- status = H5Pget_virtual_count(dcpl, &num_map);
- printf(" Number of mappings is %lu\n", (unsigned long)num_map);
-
- /*
- * Get mapping parameters for each mapping.
- */
- for (i = 0; i < (int)num_map; i++) {
- printf(" Mapping %d \n", i);
- printf(" Selection in the virtual dataset \n");
- /* Get selection in the virtual dataset */
- vspace = H5Pget_virtual_vspace(dcpl, (size_t)i);
- if (H5Sget_select_type(vspace) == H5S_SEL_HYPERSLABS) {
- if (H5Sis_regular_hyperslab(vspace)) {
- status = H5Sget_regular_hyperslab(vspace, start_out, stride_out, count_out, block_out);
- printf(" start = [%llu, %llu, %llu] \n", (unsigned long long)start_out[0],
- (unsigned long long)start_out[1], (unsigned long long)start_out[2]);
- printf(" stride = [%llu, %llu, %llu] \n", (unsigned long long)stride_out[0],
- (unsigned long long)stride_out[1], (unsigned long long)stride_out[2]);
- printf(" count = [%llu, %llu, %llu] \n", (unsigned long long)count_out[0],
- (unsigned long long)count_out[1], (unsigned long long)count_out[2]);
- printf(" block = [%llu, %llu, %llu] \n", (unsigned long long)block_out[0],
- (unsigned long long)block_out[1], (unsigned long long)block_out[2]);
- }
- }
- /* Get source file name */
- len = H5Pget_virtual_filename(dcpl, (size_t)i, NULL, 0);
- filename = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_filename(dcpl, (size_t)i, filename, len + 1);
- printf(" Source filename %s\n", filename);
-
- /* Get source dataset name */
- len = H5Pget_virtual_dsetname(dcpl, (size_t)i, NULL, 0);
- dsetname = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_dsetname(dcpl, (size_t)i, dsetname, len + 1);
- printf(" Source dataset name %s\n", dsetname);
-
- /* Get selection in the source dataset */
- printf(" Selection in the source dataset \n");
- src_space = H5Pget_virtual_srcspace(dcpl, (size_t)i);
- if (H5Sget_select_type(src_space) == H5S_SEL_HYPERSLABS) {
- if (H5Sis_regular_hyperslab(src_space)) {
- status = H5Sget_regular_hyperslab(src_space, start_out, stride_out, count_out, block_out);
- printf(" start = [%llu, %llu, %llu] \n", (unsigned long long)start_out[0],
- (unsigned long long)start_out[1], (unsigned long long)start_out[2]);
- printf(" stride = [%llu, %llu, %llu] \n", (unsigned long long)stride_out[0],
- (unsigned long long)stride_out[1], (unsigned long long)stride_out[2]);
- printf(" count = [%llu, %llu, %llu] \n", (unsigned long long)count_out[0],
- (unsigned long long)count_out[1], (unsigned long long)count_out[2]);
- printf(" block = [%llu, %llu, %llu] \n", (unsigned long long)block_out[0],
- (unsigned long long)block_out[1], (unsigned long long)block_out[2]);
- }
- }
- H5Sclose(vspace);
- H5Sclose(src_space);
- free(filename);
- free(dsetname);
- }
- /*
- * Close and release resources.
- */
- status = H5Pclose(dcpl);
- status = H5Dclose(vdset);
- status = H5Fclose(vfile);
- return 0;
-}
diff --git a/examples/h5_vds-percival-unlim.c b/examples/h5_vds-percival-unlim.c
deleted file mode 100644
index ddbcdec..0000000
--- a/examples/h5_vds-percival-unlim.c
+++ /dev/null
@@ -1,346 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group. *
- * All rights reserved. *
- * *
- * This file is part of HDF5. The full HDF5 copyright notice, including *
- * terms governing use, modification, and redistribution, is contained in *
- * the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://www.hdfgroup.org/licenses. *
- * If you do not have access to either file, you may request a copy from *
- * help@hdfgroup.org. *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/************************************************************
-
- This example illustrates the concept of the virtual dataset.
- Percival use case. Every fifth 10x10 plane in VDS is stored in
- the corresponding 3D unlimited dataset.
- There are 4 source datasets total.
- This file is intended for use with HDF5 Library version 1.10
-
- ************************************************************/
-/* EIP Add link to the picture */
-
-#include "hdf5.h"
-#include <stdio.h>
-#include <stdlib.h>
-
-#define VFILE "vds-percival-unlim.h5"
-#define DATASET "VDS-Percival-unlim"
-#define VDSDIM0 H5S_UNLIMITED
-#define VDSDIM1 10
-#define VDSDIM2 10
-
-#define DIM0 H5S_UNLIMITED
-#define DIM0_1 10 /* Initial size of the datasets */
-#define DIM1 10
-#define DIM2 10
-#define RANK 3
-#define PLANE_STRIDE 4
-
-const char *SRC_FILE[] = {"a.h5", "b.h5", "c.h5", "d.h5"};
-
-const char *SRC_DATASET[] = {"A", "B", "C", "D"};
-
-int
-main(void)
-{
- hid_t vfile, file, src_space, mem_space, vspace, vdset, dset; /* Handles */
- hid_t dcpl;
- herr_t status;
- hsize_t vdsdims[3] = {4 * DIM0_1, VDSDIM1, VDSDIM2}, vdsdims_max[3] = {VDSDIM0, VDSDIM1, VDSDIM2},
- dims[3] = {DIM0_1, DIM1, DIM2}, extdims[3] = {2 * DIM0_1, DIM1, DIM2},
- chunk_dims[3] = {DIM0_1, DIM1, DIM2}, dims_max[3] = {DIM0, DIM1, DIM2}, vdsdims_out[3],
- vdsdims_max_out[3], start[3], /* Hyperslab parameters */
- stride[3], count[3], src_count[3], block[3];
- hsize_t start_out[3], /* Hyperslab parameter out */
- stride_out[3], count_out[3], block_out[3];
- int i, j, k;
- H5D_layout_t layout; /* Storage layout */
- size_t num_map; /* Number of mappings */
- ssize_t len; /* Length of the string; also a return value */
- char *filename;
- char *dsetname;
- int wdata[DIM0_1 * DIM1 * DIM2];
- int rdata[80][10][10];
- int a_rdata[20][10][10];
-
- /*
- * Create source files and datasets. This step is optional.
- */
- for (i = 0; i < PLANE_STRIDE; i++) {
- /*
- * Initialize data for i-th source dataset.
- */
- for (j = 0; j < DIM0_1 * DIM1 * DIM2; j++)
- wdata[j] = i + 1;
-
- /*
- * Create the source files and datasets. Write data to each dataset and
- * close all resources.
- */
-
- file = H5Fcreate(SRC_FILE[i], H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
- src_space = H5Screate_simple(RANK, dims, dims_max);
- dcpl = H5Pcreate(H5P_DATASET_CREATE);
- status = H5Pset_chunk(dcpl, RANK, chunk_dims);
- dset = H5Dcreate2(file, SRC_DATASET[i], H5T_NATIVE_INT, src_space, H5P_DEFAULT, dcpl, H5P_DEFAULT);
- status = H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
- status = H5Sclose(src_space);
- status = H5Pclose(dcpl);
- status = H5Dclose(dset);
- status = H5Fclose(file);
- }
-
- vfile = H5Fcreate(VFILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
-
- /* Create VDS dataspace. */
- vspace = H5Screate_simple(RANK, vdsdims, vdsdims_max);
-
- /* Create dataspaces for the source dataset. */
- src_space = H5Screate_simple(RANK, dims, dims_max);
-
- /* Create VDS creation property */
- dcpl = H5Pcreate(H5P_DATASET_CREATE);
-
- /* Initialize hyperslab values */
-
- start[0] = 0;
- start[1] = 0;
- start[2] = 0;
- stride[0] = PLANE_STRIDE; /* we will select every fifth plane in VDS */
- stride[1] = 1;
- stride[2] = 1;
- count[0] = H5S_UNLIMITED;
- count[1] = 1;
- count[2] = 1;
- src_count[0] = H5S_UNLIMITED;
- src_count[1] = 1;
- src_count[2] = 1;
- block[0] = 1;
- block[1] = DIM1;
- block[2] = DIM2;
-
- /*
- * Build the mappings
- *
- */
- status = H5Sselect_hyperslab(src_space, H5S_SELECT_SET, start, NULL, src_count, block);
- for (i = 0; i < PLANE_STRIDE; i++) {
- status = H5Sselect_hyperslab(vspace, H5S_SELECT_SET, start, stride, count, block);
- status = H5Pset_virtual(dcpl, vspace, SRC_FILE[i], SRC_DATASET[i], src_space);
- start[0]++;
- }
-
- H5Sselect_none(vspace);
-
- /* Create a virtual dataset */
- vdset = H5Dcreate2(vfile, DATASET, H5T_NATIVE_INT, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
- status = H5Sclose(vspace);
- status = H5Sclose(src_space);
- status = H5Pclose(dcpl);
- /* Let's get space of the VDS and its dimension; we should get 40x10x10 */
- vspace = H5Dget_space(vdset);
- H5Sget_simple_extent_dims(vspace, vdsdims_out, vdsdims_max_out);
- printf("VDS dimensions first time \n");
- printf(" Current: ");
- for (i = 0; i < RANK; i++)
- printf(" %d ", (int)vdsdims_out[i]);
- printf("\n");
-
- /* Let's add data to the source datasets and check new dimensions for VDS */
-
- for (i = 0; i < PLANE_STRIDE; i++) {
- /*
- * Initialize data for i-th source dataset.
- */
- for (j = 0; j < DIM0_1 * DIM1 * DIM2; j++)
- wdata[j] = 10 * (i + 1);
-
- /*
- * Create the source files and datasets. Write data to each dataset and
- * close all resources.
- */
-
- file = H5Fopen(SRC_FILE[i], H5F_ACC_RDWR, H5P_DEFAULT);
- dset = H5Dopen2(file, SRC_DATASET[i], H5P_DEFAULT);
- status = H5Dset_extent(dset, extdims);
- src_space = H5Dget_space(dset);
- start[0] = DIM0_1;
- start[1] = 0;
- start[2] = 0;
- count[0] = 1;
- count[1] = 1;
- count[2] = 1;
- block[0] = DIM0_1;
- block[1] = DIM1;
- block[2] = DIM2;
-
- mem_space = H5Screate_simple(RANK, dims, NULL);
- status = H5Sselect_hyperslab(src_space, H5S_SELECT_SET, start, NULL, count, block);
- status = H5Dwrite(dset, H5T_NATIVE_INT, mem_space, src_space, H5P_DEFAULT, wdata);
- status = H5Sclose(src_space);
- status = H5Dclose(dset);
- status = H5Fclose(file);
- }
-
- status = H5Dclose(vdset);
- status = H5Fclose(vfile);
-
- /*
- * Now we begin the read section of this example.
- */
-
- /*
- * Open file and dataset using the default properties.
- */
- vfile = H5Fopen(VFILE, H5F_ACC_RDONLY, H5P_DEFAULT);
- vdset = H5Dopen2(vfile, DATASET, H5P_DEFAULT);
-
- /*
- * Get creation property list and mapping properties.
- */
- dcpl = H5Dget_create_plist(vdset);
-
- /*
- * Get storage layout.
- */
- layout = H5Pget_layout(dcpl);
-
- if (H5D_VIRTUAL == layout)
- printf(" Dataset has a virtual layout \n");
- else
- printf(" Wrong layout found \n");
-
- /*
- * Find number of mappings.
- */
- status = H5Pget_virtual_count(dcpl, &num_map);
- printf(" Number of mappings is %lu\n", (unsigned long)num_map);
-
- /*
- * Get mapping parameters for each mapping.
- */
- for (i = 0; i < (int)num_map; i++) {
- printf(" Mapping %d \n", i);
- printf(" Selection in the virtual dataset \n");
- /* Get selection in the virtual dataset */
- vspace = H5Pget_virtual_vspace(dcpl, (size_t)i);
- if (H5Sget_select_type(vspace) == H5S_SEL_HYPERSLABS) {
- if (H5Sis_regular_hyperslab(vspace)) {
- status = H5Sget_regular_hyperslab(vspace, start_out, stride_out, count_out, block_out);
- printf(" start = [%llu, %llu, %llu] \n", (unsigned long long)start_out[0],
- (unsigned long long)start_out[1], (unsigned long long)start_out[2]);
- printf(" stride = [%llu, %llu, %llu] \n", (unsigned long long)stride_out[0],
- (unsigned long long)stride_out[1], (unsigned long long)stride_out[2]);
- printf(" count = [%llu, %llu, %llu] \n", (unsigned long long)count_out[0],
- (unsigned long long)count_out[1], (unsigned long long)count_out[2]);
- printf(" block = [%llu, %llu, %llu] \n", (unsigned long long)block_out[0],
- (unsigned long long)block_out[1], (unsigned long long)block_out[2]);
- }
- }
- /* Get source file name */
- len = H5Pget_virtual_filename(dcpl, (size_t)i, NULL, 0);
- filename = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_filename(dcpl, (size_t)i, filename, len + 1);
- printf(" Source filename %s\n", filename);
-
- /* Get source dataset name */
- len = H5Pget_virtual_dsetname(dcpl, (size_t)i, NULL, 0);
- dsetname = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_dsetname(dcpl, (size_t)i, dsetname, len + 1);
- printf(" Source dataset name %s\n", dsetname);
-
- /* Get selection in the source dataset */
- printf(" Selection in the source dataset \n");
- src_space = H5Pget_virtual_srcspace(dcpl, (size_t)i);
- if (H5Sget_select_type(src_space) == H5S_SEL_HYPERSLABS) {
- if (H5Sis_regular_hyperslab(src_space)) {
- status = H5Sget_regular_hyperslab(src_space, start_out, stride_out, count_out, block_out);
- printf(" start = [%llu, %llu, %llu] \n", (unsigned long long)start_out[0],
- (unsigned long long)start_out[1], (unsigned long long)start_out[2]);
- printf(" stride = [%llu, %llu, %llu] \n", (unsigned long long)stride_out[0],
- (unsigned long long)stride_out[1], (unsigned long long)stride_out[2]);
- printf(" count = [%llu, %llu, %llu] \n", (unsigned long long)count_out[0],
- (unsigned long long)count_out[1], (unsigned long long)count_out[2]);
- printf(" block = [%llu, %llu, %llu] \n", (unsigned long long)block_out[0],
- (unsigned long long)block_out[1], (unsigned long long)block_out[2]);
- }
- }
- H5Sclose(vspace);
- H5Sclose(src_space);
- free(filename);
- free(dsetname);
- }
- /*
- * Read data from VDS.
- */
- vspace = H5Dget_space(vdset);
- H5Sget_simple_extent_dims(vspace, vdsdims_out, vdsdims_max_out);
- printf("VDS dimensions second time \n");
- printf(" Current: ");
- for (i = 0; i < RANK; i++)
- printf(" %d ", (int)vdsdims_out[i]);
- printf("\n");
-
- /* Read all VDS data */
-
- /* EIP We should be able to do it by using H5S_ALL instead of making selection
- * or using H5Sselect_all from vspace.
- */
- start[0] = 0;
- start[1] = 0;
- start[2] = 0;
- count[0] = 1;
- count[1] = 1;
- count[2] = 1;
- block[0] = vdsdims_out[0];
- block[1] = vdsdims_out[1];
- block[2] = vdsdims_out[2];
-
- status = H5Sselect_hyperslab(vspace, H5S_SELECT_SET, start, NULL, count, block);
- mem_space = H5Screate_simple(RANK, vdsdims_out, NULL);
- status = H5Dread(vdset, H5T_NATIVE_INT, mem_space, vspace, H5P_DEFAULT, rdata);
- printf(" All data: \n");
- for (i = 0; i < (int)vdsdims_out[0]; i++) {
- for (j = 0; j < (int)vdsdims_out[1]; j++) {
- printf("(%d, %d, 0)", i, j);
- for (k = 0; k < (int)vdsdims_out[2]; k++)
- printf(" %d ", rdata[i][j][k]);
- printf("\n");
- }
- }
- /* Read VDS, but only data mapeed to dataset a.h5 */
- start[0] = 0;
- start[1] = 0;
- start[2] = 0;
- stride[0] = PLANE_STRIDE;
- stride[1] = 1;
- stride[2] = 1;
- count[0] = 2 * DIM0_1;
- count[1] = 1;
- count[2] = 1;
- block[0] = 1;
- block[1] = vdsdims_out[1];
- block[2] = vdsdims_out[2];
- dims[0] = 2 * DIM0_1;
- status = H5Sselect_hyperslab(vspace, H5S_SELECT_SET, start, stride, count, block);
- mem_space = H5Screate_simple(RANK, dims, NULL);
- status = H5Dread(vdset, H5T_NATIVE_INT, mem_space, vspace, H5P_DEFAULT, a_rdata);
- printf(" All data: \n");
- for (i = 0; i < 2 * DIM0_1; i++) {
- for (j = 0; j < (int)vdsdims_out[1]; j++) {
- printf("(%d, %d, 0)", i, j);
- for (k = 0; k < (int)vdsdims_out[2]; k++)
- printf(" %d ", a_rdata[i][j][k]);
- printf("\n");
- }
- }
- /*
- * Close and release resources.
- */
- status = H5Sclose(mem_space);
- status = H5Pclose(dcpl);
- status = H5Dclose(vdset);
- status = H5Fclose(vfile);
- return 0;
-}
diff --git a/examples/h5_vds-percival.c b/examples/h5_vds-percival.c
deleted file mode 100644
index 82c8ef4..0000000
--- a/examples/h5_vds-percival.c
+++ /dev/null
@@ -1,241 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group. *
- * All rights reserved. *
- * *
- * This file is part of HDF5. The full HDF5 copyright notice, including *
- * terms governing use, modification, and redistribution, is contained in *
- * the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://www.hdfgroup.org/licenses. *
- * If you do not have access to either file, you may request a copy from *
- * help@hdfgroup.org. *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/************************************************************
-
- This example illustrates the concept of the virtual dataset.
- Percival use case. Every fifth 10x10 plane in VDS is stored in
- the corresponding 3D unlimited dataset.
- EIP: For now we will use finite dimension.
- There are 4 source datasets total.
- This file is intended for use with HDF5 Library version 1.10
-
- ************************************************************/
-/* EIP Add link to the picture */
-
-#include "hdf5.h"
-#include <stdio.h>
-#include <stdlib.h>
-
-#define FILE "vds-percival.h5"
-#define DATASET "VDS-Percival"
-/* later
-#define VDSDIM0 H5S_UNLIMITED
-*/
-#define VDSDIM0 40
-#define VDSDIM1 10
-#define VDSDIM2 10
-/* later
-#define DIM0 H5S_UNLIMITED
-*/
-#define DIM0 10
-#define DIM1 10
-#define DIM2 10
-#define RANK 3
-#define PLANE_STRIDE 4
-
-const char *SRC_FILE[] = {"a.h5", "b.h5", "c.h5", "d.h5"};
-
-const char *SRC_DATASET[] = {"A", "B", "C", "D"};
-
-int
-main(void)
-{
- hid_t file, src_space, vspace, dset; /* Handles */
- hid_t dcpl;
- herr_t status;
- hsize_t vdsdims[3] = {VDSDIM0, VDSDIM1, VDSDIM2}, vdsdims_max[3] = {VDSDIM0, VDSDIM1, VDSDIM2},
- dims[3] = {DIM0, DIM1, DIM2}, dims_max[3] = {DIM0, DIM1, DIM2},
- start[3], /* Hyperslab start parameter for VDS */
- stride[3], count[3], src_count[3], block[3];
- hsize_t start_out[3], /* Hyperslab parameter out */
- stride_out[3], count_out[3], block_out[3];
- int i, j;
- H5D_layout_t layout; /* Storage layout */
- size_t num_map; /* Number of mappings */
- ssize_t len; /* Length of the string; also a return value */
- char *filename;
- char *dsetname;
- int wdata[DIM0 * DIM1 * DIM2];
-
- /*
- * Create source files and datasets. This step is optional.
- */
- for (i = 0; i < PLANE_STRIDE; i++) {
- /*
- * Initialize data for i-th source dataset.
- */
- for (j = 0; j < DIM0 * DIM1 * DIM2; j++)
- wdata[j] = i + 1;
-
- /*
- * Create the source files and datasets. Write data to each dataset and
- * close all resources.
- */
-
- file = H5Fcreate(SRC_FILE[i], H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
- src_space = H5Screate_simple(RANK, dims, NULL);
- dset = H5Dcreate2(file, SRC_DATASET[i], H5T_NATIVE_INT, src_space, H5P_DEFAULT, H5P_DEFAULT,
- H5P_DEFAULT);
- status = H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
- status = H5Sclose(src_space);
- status = H5Dclose(dset);
- status = H5Fclose(file);
- }
-
- file = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
-
- /* Create VDS dataspace. */
- vspace = H5Screate_simple(RANK, vdsdims, vdsdims_max);
-
- /* Create dataspaces for the source dataset. */
- src_space = H5Screate_simple(RANK, dims, dims_max);
-
- /* Create VDS creation property */
- dcpl = H5Pcreate(H5P_DATASET_CREATE);
-
- /* Initialize hyperslab values */
-
- start[0] = 0;
- start[1] = 0;
- start[2] = 0;
-    stride[0] = PLANE_STRIDE; /* we will select every fourth plane in the VDS */
- stride[1] = 1;
- stride[2] = 1;
- /* later
- count[0] = H5S_UNLIMITED;
- */
- count[0] = VDSDIM0 / 4;
- count[1] = 1;
- count[2] = 1;
- /* later
- src_count[0] = H5S_UNLIMITED;
- */
- src_count[0] = DIM0;
- src_count[1] = 1;
- src_count[2] = 1;
- block[0] = 1;
- block[1] = DIM1;
- block[2] = DIM2;
-
- /*
- * Build the mappings
- *
- */
- status = H5Sselect_hyperslab(src_space, H5S_SELECT_SET, start, NULL, src_count, block);
- for (i = 0; i < PLANE_STRIDE; i++) {
- status = H5Sselect_hyperslab(vspace, H5S_SELECT_SET, start, stride, count, block);
- status = H5Pset_virtual(dcpl, vspace, SRC_FILE[i], SRC_DATASET[i], src_space);
- start[0]++;
- }
-
- H5Sselect_none(vspace);
-
- /* Create a virtual dataset */
- dset = H5Dcreate2(file, DATASET, H5T_NATIVE_INT, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
- status = H5Sclose(vspace);
- status = H5Sclose(src_space);
- status = H5Dclose(dset);
- status = H5Fclose(file);
-
- /*
- * Now we begin the read section of this example.
- */
-
- /*
- * Open file and dataset using the default properties.
- */
- file = H5Fopen(FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
- dset = H5Dopen2(file, DATASET, H5P_DEFAULT);
-
- /*
- * Get creation property list and mapping properties.
- */
- dcpl = H5Dget_create_plist(dset);
-
- /*
- * Get storage layout.
- */
- layout = H5Pget_layout(dcpl);
-
- if (H5D_VIRTUAL == layout)
- printf(" Dataset has a virtual layout \n");
- else
- printf(" Wrong layout found \n");
-
- /*
- * Find number of mappings.
- */
- status = H5Pget_virtual_count(dcpl, &num_map);
- printf(" Number of mappings is %lu\n", (unsigned long)num_map);
-
- /*
- * Get mapping parameters for each mapping.
- */
- for (i = 0; i < (int)num_map; i++) {
- printf(" Mapping %d \n", i);
- printf(" Selection in the virtual dataset \n");
- /* Get selection in the virtual dataset */
- vspace = H5Pget_virtual_vspace(dcpl, (size_t)i);
- if (H5Sget_select_type(vspace) == H5S_SEL_HYPERSLABS) {
- if (H5Sis_regular_hyperslab(vspace)) {
- status = H5Sget_regular_hyperslab(vspace, start_out, stride_out, count_out, block_out);
- printf(" start = [%llu, %llu, %llu] \n", (unsigned long long)start_out[0],
- (unsigned long long)start_out[1], (unsigned long long)start_out[2]);
- printf(" stride = [%llu, %llu, %llu] \n", (unsigned long long)stride_out[0],
- (unsigned long long)stride_out[1], (unsigned long long)stride_out[2]);
- printf(" count = [%llu, %llu, %llu] \n", (unsigned long long)count_out[0],
- (unsigned long long)count_out[1], (unsigned long long)count_out[2]);
- printf(" block = [%llu, %llu, %llu] \n", (unsigned long long)block_out[0],
- (unsigned long long)block_out[1], (unsigned long long)block_out[2]);
- }
- }
- /* Get source file name */
- len = H5Pget_virtual_filename(dcpl, (size_t)i, NULL, 0);
- filename = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_filename(dcpl, (size_t)i, filename, len + 1);
- printf(" Source filename %s\n", filename);
-
- /* Get source dataset name */
- len = H5Pget_virtual_dsetname(dcpl, (size_t)i, NULL, 0);
- dsetname = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_dsetname(dcpl, (size_t)i, dsetname, len + 1);
- printf(" Source dataset name %s\n", dsetname);
-
- /* Get selection in the source dataset */
- printf(" Selection in the source dataset \n");
- src_space = H5Pget_virtual_srcspace(dcpl, (size_t)i);
- if (H5Sget_select_type(src_space) == H5S_SEL_HYPERSLABS) {
- if (H5Sis_regular_hyperslab(src_space)) {
- status = H5Sget_regular_hyperslab(src_space, start_out, stride_out, count_out, block_out);
- printf(" start = [%llu, %llu, %llu] \n", (unsigned long long)start_out[0],
- (unsigned long long)start_out[1], (unsigned long long)start_out[2]);
- printf(" stride = [%llu, %llu, %llu] \n", (unsigned long long)stride_out[0],
- (unsigned long long)stride_out[1], (unsigned long long)stride_out[2]);
- printf(" count = [%llu, %llu, %llu] \n", (unsigned long long)count_out[0],
- (unsigned long long)count_out[1], (unsigned long long)count_out[2]);
- printf(" block = [%llu, %llu, %llu] \n", (unsigned long long)block_out[0],
- (unsigned long long)block_out[1], (unsigned long long)block_out[2]);
- }
- }
- H5Sclose(vspace);
- H5Sclose(src_space);
- free(filename);
- free(dsetname);
- }
- /*
- * Close and release resources.
- */
- status = H5Pclose(dcpl);
- status = H5Dclose(dset);
- status = H5Fclose(file);
- return 0;
-}
diff --git a/examples/h5_vds-simpleIO.c b/examples/h5_vds-simpleIO.c
deleted file mode 100644
index f516af9..0000000
--- a/examples/h5_vds-simpleIO.c
+++ /dev/null
@@ -1,190 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group. *
- * All rights reserved. *
- * *
- * This file is part of HDF5. The full HDF5 copyright notice, including *
- * terms governing use, modification, and redistribution, is contained in *
- * the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://www.hdfgroup.org/licenses. *
- * If you do not have access to either file, you may request a copy from *
- * help@hdfgroup.org. *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-/************************************************************
-
- This example illustrates the concept of virtual dataset I/O.
- The program creates a 2-dim source dataset and writes
- data to it. Then it creates a 2-dim virtual dataset with
- the same dimension sizes and maps all elements of the
- virtual dataset to all elements of the source dataset.
- The VDS is then read back.
-
- This file is intended for use with HDF5 Library version 1.10
-
- ************************************************************/
-/* EIP Add link to the picture */
-
-#include "hdf5.h"
-#include <stdio.h>
-#include <stdlib.h>
-
-#define FILE "vds-simpleIO.h5"
-#define DATASET "VDS"
-#define DIM1 6
-#define DIM0 4
-#define RANK 2
-
-#define SRC_FILE "a.h5"
-#define SRC_DATASET "/A"
-
-int
-main(void)
-{
- hid_t file, space, src_space, vspace, dset; /* Handles */
- hid_t dcpl;
- herr_t status;
- hsize_t vdsdims[2] = {DIM0, DIM1}, /* Virtual dataset dimension */
- dims[2] = {DIM0, DIM1}; /* Source dataset dimensions */
- int wdata[DIM0][DIM1], /* Write buffer for source dataset */
- rdata[DIM0][DIM1], /* Read buffer for virtual dataset */
- i, j;
- H5D_layout_t layout; /* Storage layout */
- size_t num_map; /* Number of mappings */
- ssize_t len; /* Length of the string; also a return value */
- char *filename;
- char *dsetname;
- /*
- * Initialize data.
- */
- for (i = 0; i < DIM0; i++)
- for (j = 0; j < DIM1; j++)
- wdata[i][j] = i + 1;
-
- /*
- * Create the source file and the dataset. Write data to the source dataset
- * and close all resources.
- */
-
- file = H5Fcreate(SRC_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
- space = H5Screate_simple(RANK, dims, NULL);
- dset = H5Dcreate2(file, SRC_DATASET, H5T_NATIVE_INT, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
- status = H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata[0]);
- status = H5Sclose(space);
- status = H5Dclose(dset);
- status = H5Fclose(file);
-
- /* Create file in which virtual dataset will be stored. */
- file = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
-
- /* Create VDS dataspace. */
- vspace = H5Screate_simple(RANK, vdsdims, NULL);
-
- /* Set VDS creation property. */
- dcpl = H5Pcreate(H5P_DATASET_CREATE);
-
- /*
-     * Build the mapping.
-     * Both the virtual and the source dataspace use the default selection
-     * (all elements), so every element of the virtual dataset maps to
-     * the corresponding element of the source dataset.
- */
- src_space = H5Screate_simple(RANK, dims, NULL);
- status = H5Pset_virtual(dcpl, vspace, SRC_FILE, SRC_DATASET, src_space);
-
- /* Create a virtual dataset. */
- dset = H5Dcreate2(file, DATASET, H5T_NATIVE_INT, vspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
- status = H5Sclose(vspace);
- status = H5Sclose(src_space);
- status = H5Dclose(dset);
- status = H5Fclose(file);
-
- /*
- * Now we begin the read section of this example.
- */
-
- /*
- * Open the file and virtual dataset.
- */
- file = H5Fopen(FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
- dset = H5Dopen2(file, DATASET, H5P_DEFAULT);
- /*
- * Get creation property list and mapping properties.
- */
- dcpl = H5Dget_create_plist(dset);
-
- /*
- * Get storage layout.
- */
- layout = H5Pget_layout(dcpl);
- if (H5D_VIRTUAL == layout)
- printf(" Dataset has a virtual layout \n");
- else
- printf(" Wrong layout found \n");
-
- /*
- * Find the number of mappings.
- */
- status = H5Pget_virtual_count(dcpl, &num_map);
- printf(" Number of mappings is %lu\n", (unsigned long)num_map);
-
- /*
- * Get mapping parameters for each mapping.
- */
- for (i = 0; i < (int)num_map; i++) {
- printf(" Mapping %d \n", i);
- printf(" Selection in the virtual dataset ");
- /* Get selection in the virtual dataset */
- vspace = H5Pget_virtual_vspace(dcpl, (size_t)i);
-
- /* Make sure it is ALL selection and then print selection. */
- if (H5Sget_select_type(vspace) == H5S_SEL_ALL) {
- printf("Selection is H5S_ALL \n");
- }
- /* Get source file name. */
- len = H5Pget_virtual_filename(dcpl, (size_t)i, NULL, 0);
- filename = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_filename(dcpl, (size_t)i, filename, len + 1);
- printf(" Source filename %s\n", filename);
-
- /* Get source dataset name. */
- len = H5Pget_virtual_dsetname(dcpl, (size_t)i, NULL, 0);
- dsetname = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_dsetname(dcpl, (size_t)i, dsetname, len + 1);
- printf(" Source dataset name %s\n", dsetname);
-
- /* Get selection in the source dataset. */
- printf(" Selection in the source dataset ");
- src_space = H5Pget_virtual_srcspace(dcpl, (size_t)i);
-
- /* Make sure it is ALL selection and then print selection. */
- if (H5Sget_select_type(src_space) == H5S_SEL_ALL) {
- printf("Selection is H5S_ALL \n");
- }
- H5Sclose(vspace);
- H5Sclose(src_space);
- free(filename);
- free(dsetname);
- }
- /*
- * Read the data using the default properties.
- */
- status = H5Dread(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata[0]);
-
- /*
- * Output the data to the screen.
- */
- printf(" VDS Data:\n");
- for (i = 0; i < DIM0; i++) {
- printf(" [");
- for (j = 0; j < DIM1; j++)
- printf(" %3d", rdata[i][j]);
- printf("]\n");
- }
- /*
- * Close and release resources.
- */
- status = H5Pclose(dcpl);
- status = H5Dclose(dset);
- status = H5Fclose(file);
-
- return 0;
-}
diff --git a/examples/h5_vds.c b/examples/h5_vds.c
deleted file mode 100644
index 96bd8a2..0000000
--- a/examples/h5_vds.c
+++ /dev/null
@@ -1,252 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group. *
- * All rights reserved. *
- * *
- * This file is part of HDF5. The full HDF5 copyright notice, including *
- * terms governing use, modification, and redistribution, is contained in *
- * the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://www.hdfgroup.org/licenses. *
- * If you do not have access to either file, you may request a copy from *
- * help@hdfgroup.org. *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/************************************************************
-
- This example illustrates the concept of a virtual dataset.
- The program creates three 1-dim source datasets and writes
- data to them. Then it creates a 2-dim virtual dataset and
- maps the first three rows of the virtual dataset to the data
- in the source datasets. Elements of a row are mapped to all
- elements of the corresponding source dataset.
- The fourth row is not mapped and will be filled with the fill
- values when the virtual dataset is read back.
-
- The program closes all datasets, and then reopens the virtual
- dataset, and finds and prints its creation properties.
- Then it reads the values.
-
- This file is intended for use with HDF5 Library version 1.10
-
- ************************************************************/
-/* EIP Add link to the picture */
-
-#include "hdf5.h"
-#include <stdio.h>
-#include <stdlib.h>
-
-#define FILE "vds.h5"
-#define DATASET "VDS"
-#define VDSDIM1 6
-#define VDSDIM0 4
-#define DIM0 6
-#define RANK1 1
-#define RANK2 2
-
-const char *SRC_FILE[] = {"a.h5", "b.h5", "c.h5"};
-
-const char *SRC_DATASET[] = {"A", "B", "C"};
-
-int
-main(void)
-{
- hid_t file, space, src_space, vspace, dset; /* Handles */
- hid_t dcpl;
- herr_t status;
- hsize_t vdsdims[2] = {VDSDIM0, VDSDIM1}, /* Virtual datasets dimension */
- dims[1] = {DIM0}, /* Source datasets dimensions */
- start[2], /* Hyperslab parameters */
- count[2], block[2];
- hsize_t start_out[2], stride_out[2], count_out[2], block_out[2];
- int wdata[DIM0], /* Write buffer for source dataset */
- rdata[VDSDIM0][VDSDIM1], /* Read buffer for virtual dataset */
- i, j, k, l, block_inc;
- int fill_value = -1; /* Fill value for VDS */
- H5D_layout_t layout; /* Storage layout */
- size_t num_map; /* Number of mappings */
- ssize_t len; /* Length of the string; also a return value */
- char *filename;
- char *dsetname;
- hsize_t nblocks;
- hsize_t *buf; /* Buffer to hold hyperslab coordinates */
-
- /*
- * Create source files and datasets. This step is optional.
- */
- for (i = 0; i < 3; i++) {
- /*
- * Initialize data for i-th source dataset.
- */
- for (j = 0; j < DIM0; j++)
- wdata[j] = i + 1;
-
- /*
- * Create the source files and datasets. Write data to each dataset and
- * close all resources.
- */
-
- file = H5Fcreate(SRC_FILE[i], H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
- space = H5Screate_simple(RANK1, dims, NULL);
- dset = H5Dcreate2(file, SRC_DATASET[i], H5T_NATIVE_INT, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
- status = H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
- status = H5Sclose(space);
- status = H5Dclose(dset);
- status = H5Fclose(file);
- }
-
- /* Create file in which virtual dataset will be stored. */
- file = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
-
- /* Create VDS dataspace. */
- space = H5Screate_simple(RANK2, vdsdims, NULL);
-
- /* Set VDS creation property. */
- dcpl = H5Pcreate(H5P_DATASET_CREATE);
- status = H5Pset_fill_value(dcpl, H5T_NATIVE_INT, &fill_value);
-
- /* Initialize hyperslab values. */
- start[0] = 0;
- start[1] = 0;
- count[0] = 1;
- count[1] = 1;
- block[0] = 1;
- block[1] = VDSDIM1;
-
- /*
- * Build the mappings.
- * Selections in the source datasets are H5S_ALL.
- * In the virtual dataset we select the first, the second and the third rows
- * and map each row to the data in the corresponding source dataset.
- */
- src_space = H5Screate_simple(RANK1, dims, NULL);
- for (i = 0; i < 3; i++) {
- start[0] = (hsize_t)i;
- /* Select i-th row in the virtual dataset; selection in the source datasets is the same. */
- status = H5Sselect_hyperslab(space, H5S_SELECT_SET, start, NULL, count, block);
- status = H5Pset_virtual(dcpl, space, SRC_FILE[i], SRC_DATASET[i], src_space);
- }
-
- /* Create a virtual dataset. */
- dset = H5Dcreate2(file, DATASET, H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT);
- status = H5Sclose(space);
- status = H5Sclose(src_space);
- status = H5Dclose(dset);
- status = H5Fclose(file);
-
- /*
- * Now we begin the read section of this example.
- */
-
- /*
- * Open the file and virtual dataset.
- */
- file = H5Fopen(FILE, H5F_ACC_RDONLY, H5P_DEFAULT);
- dset = H5Dopen2(file, DATASET, H5P_DEFAULT);
-
- /*
- * Get creation property list and mapping properties.
- */
- dcpl = H5Dget_create_plist(dset);
-
- /*
- * Get storage layout.
- */
- layout = H5Pget_layout(dcpl);
- if (H5D_VIRTUAL == layout)
- printf(" Dataset has a virtual layout \n");
- else
- printf(" Wrong layout found \n");
-
- /*
- * Find the number of mappings.
- */
- status = H5Pget_virtual_count(dcpl, &num_map);
- printf(" Number of mappings is %lu\n", (unsigned long)num_map);
-
- /*
- * Get mapping parameters for each mapping.
- */
- for (i = 0; i < (int)num_map; i++) {
- printf(" Mapping %d \n", i);
- printf(" Selection in the virtual dataset ");
- /* Get selection in the virtual dataset */
- vspace = H5Pget_virtual_vspace(dcpl, (size_t)i);
-
- /* Make sure that this is a hyperslab selection and then print information. */
- if (H5Sget_select_type(vspace) == H5S_SEL_HYPERSLABS) {
- nblocks = H5Sget_select_hyper_nblocks(vspace);
- buf = (hsize_t *)malloc(sizeof(hsize_t) * 2 * RANK2 * nblocks);
- status = H5Sget_select_hyper_blocklist(vspace, (hsize_t)0, nblocks, buf);
- for (l = 0; l < nblocks; l++) {
- block_inc = 2 * RANK2 * l;
- printf("(");
- for (k = 0; k < RANK2 - 1; k++)
- printf("%d,", (int)buf[block_inc + k]);
- printf("%d) - (", (int)buf[block_inc + k]);
- for (k = 0; k < RANK2 - 1; k++)
- printf("%d,", (int)buf[block_inc + RANK2 + k]);
- printf("%d)\n", (int)buf[block_inc + RANK2 + k]);
- }
-            /* We can also use H5Sget_regular_hyperslab to get start, stride, count and block */
- if (H5Sis_regular_hyperslab(vspace)) {
- status = H5Sget_regular_hyperslab(vspace, start_out, stride_out, count_out, block_out);
- printf(" start = [%llu, %llu] \n", (unsigned long long)start_out[0],
- (unsigned long long)start_out[1]);
- printf(" stride = [%llu, %llu] \n", (unsigned long long)stride_out[0],
- (unsigned long long)stride_out[1]);
- printf(" count = [%llu, %llu] \n", (unsigned long long)count_out[0],
- (unsigned long long)count_out[1]);
- printf(" block = [%llu, %llu] \n", (unsigned long long)block_out[0],
- (unsigned long long)block_out[1]);
- }
- }
- /* Get source file name. */
- len = H5Pget_virtual_filename(dcpl, (size_t)i, NULL, 0);
- filename = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_filename(dcpl, (size_t)i, filename, len + 1);
- printf(" Source filename %s\n", filename);
-
- /* Get source dataset name. */
- len = H5Pget_virtual_dsetname(dcpl, (size_t)i, NULL, 0);
- dsetname = (char *)malloc((size_t)len * sizeof(char) + 1);
- H5Pget_virtual_dsetname(dcpl, (size_t)i, dsetname, len + 1);
- printf(" Source dataset name %s\n", dsetname);
-
- /* Get selection in the source dataset. */
- printf(" Selection in the source dataset ");
- src_space = H5Pget_virtual_srcspace(dcpl, (size_t)i);
-
- /* Make sure it is ALL selection and then print the coordinates. */
- if (H5Sget_select_type(src_space) == H5S_SEL_ALL) {
- printf("(0) - (%d) \n", DIM0 - 1);
- }
- H5Sclose(vspace);
- H5Sclose(src_space);
- free(filename);
- free(dsetname);
- free(buf);
- }
-
- /*
- * Read the data using the default properties.
- */
- status = H5Dread(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata[0]);
-
- /*
- * Output the data to the screen.
- */
- printf(" VDS Data:\n");
- for (i = 0; i < VDSDIM0; i++) {
- printf(" [");
- for (j = 0; j < VDSDIM1; j++)
- printf(" %3d", rdata[i][j]);
- printf("]\n");
- }
- /*
- * Close and release resources.
- */
- status = H5Pclose(dcpl);
- status = H5Dclose(dset);
- status = H5Fclose(file);
-
- return 0;
-}
diff --git a/examples/ph5_filtered_writes.c b/examples/ph5_filtered_writes.c
deleted file mode 100644
index f4ddae9..0000000
--- a/examples/ph5_filtered_writes.c
+++ /dev/null
@@ -1,488 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group. *
- * All rights reserved. *
- * *
- * This file is part of HDF5. The full HDF5 copyright notice, including *
- * terms governing use, modification, and redistribution, is contained in *
- * the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://www.hdfgroup.org/licenses. *
- * If you do not have access to either file, you may request a copy from *
- * help@hdfgroup.org. *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/*
- * Example of using the parallel HDF5 library to write to datasets
- * with filters applied to them.
- *
- * If the HDF5_NOCLEANUP environment variable is set, the file that
- * this example creates will not be removed as the example finishes.
- *
- * A parallel file prefix is needed because, in general, the current
- * working directory in which compiling is done is not suitable for
- * parallel I/O and there is no standard pathname for parallel file
- * systems. In some cases, the parallel file name may even need a
- * parallel file type prefix such as: "pfs:/GF/...". Therefore, this
- * example parses the HDF5_PARAPREFIX environment variable for a prefix,
- * if one is needed.
- */
-
-#include <stdlib.h>
-
-#include "hdf5.h"
-
-#if defined(H5_HAVE_PARALLEL) && defined(H5_HAVE_PARALLEL_FILTERED_WRITES)
-
-#define EXAMPLE_FILE "ph5_filtered_writes.h5"
-#define EXAMPLE_DSET1_NAME "DSET1"
-#define EXAMPLE_DSET2_NAME "DSET2"
-
-#define EXAMPLE_DSET_DIMS 2
-#define EXAMPLE_DSET_CHUNK_DIM_SIZE 10
-
-/* Dataset datatype */
-#define HDF5_DATATYPE H5T_NATIVE_INT
-typedef int C_DATATYPE;
-
-#ifndef PATH_MAX
-#define PATH_MAX 512
-#endif
-
-/* Global variables */
-int mpi_rank, mpi_size;
-
-/*
- * Routine to set an HDF5 filter on the given DCPL
- */
-static void
-set_filter(hid_t dcpl_id)
-{
- htri_t filter_avail;
-
- /*
- * Check if 'deflate' filter is available
- */
- filter_avail = H5Zfilter_avail(H5Z_FILTER_DEFLATE);
- if (filter_avail < 0)
- return;
- else if (filter_avail) {
- /*
- * Set 'deflate' filter with reasonable
- * compression level on DCPL
- */
- H5Pset_deflate(dcpl_id, 6);
- }
- else {
- /*
- * Set Fletcher32 checksum filter on DCPL
- * since it is always available in HDF5
- */
- H5Pset_fletcher32(dcpl_id);
- }
-}
-
-/*
- * Routine to fill a data buffer with data. Assumes the
- * dimension rank is 2 and the data is stored contiguously.
- */
-void
-fill_databuf(hsize_t start[], hsize_t count[], hsize_t stride[], C_DATATYPE *data)
-{
- C_DATATYPE *dataptr = data;
- hsize_t i, j;
-
- /* Use MPI rank value for data */
- for (i = 0; i < count[0]; i++) {
- for (j = 0; j < count[1]; j++) {
- *dataptr++ = mpi_rank;
- }
- }
-}
-
-/* Cleanup created file */
-static void
-cleanup(char *filename)
-{
- bool do_cleanup = getenv(HDF5_NOCLEANUP) ? false : true;
-
- if (do_cleanup)
- MPI_File_delete(filename, MPI_INFO_NULL);
-}
-
-/*
- * Routine to write to a dataset in a fashion
- * where no chunks in the dataset are written
- * to by more than 1 MPI rank. This will
- * generally give the best performance as the
- * MPI ranks will need the least amount of
- * inter-process communication.
- */
-static void
-write_dataset_no_overlap(hid_t file_id, hid_t dxpl_id)
-{
- C_DATATYPE data[EXAMPLE_DSET_CHUNK_DIM_SIZE][4 * EXAMPLE_DSET_CHUNK_DIM_SIZE];
- hsize_t dataset_dims[EXAMPLE_DSET_DIMS];
- hsize_t chunk_dims[EXAMPLE_DSET_DIMS];
- hsize_t start[EXAMPLE_DSET_DIMS];
- hsize_t stride[EXAMPLE_DSET_DIMS];
- hsize_t count[EXAMPLE_DSET_DIMS];
- hid_t dset_id = H5I_INVALID_HID;
- hid_t dcpl_id = H5I_INVALID_HID;
- hid_t file_dataspace = H5I_INVALID_HID;
-
- /*
- * ------------------------------------
- * Setup Dataset Creation Property List
- * ------------------------------------
- */
-
- dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
-
- /*
- * REQUIRED: Dataset chunking must be enabled to
- * apply a data filter to the dataset.
- * Chunks in the dataset are of size
- * EXAMPLE_DSET_CHUNK_DIM_SIZE x EXAMPLE_DSET_CHUNK_DIM_SIZE.
- */
- chunk_dims[0] = EXAMPLE_DSET_CHUNK_DIM_SIZE;
- chunk_dims[1] = EXAMPLE_DSET_CHUNK_DIM_SIZE;
- H5Pset_chunk(dcpl_id, EXAMPLE_DSET_DIMS, chunk_dims);
-
- /* Set filter to be applied to created datasets */
- set_filter(dcpl_id);
-
- /*
- * ------------------------------------
- * Define the dimensions of the dataset
- * and create it
- * ------------------------------------
- */
-
- /*
- * Create a dataset composed of 4 chunks
- * per MPI rank. The first dataset dimension
- * scales according to the number of MPI ranks.
- * The second dataset dimension stays fixed
- * according to the chunk size.
- */
- dataset_dims[0] = EXAMPLE_DSET_CHUNK_DIM_SIZE * mpi_size;
- dataset_dims[1] = 4 * EXAMPLE_DSET_CHUNK_DIM_SIZE;
-
- file_dataspace = H5Screate_simple(EXAMPLE_DSET_DIMS, dataset_dims, NULL);
-
- /* Create the dataset */
- dset_id = H5Dcreate2(file_id, EXAMPLE_DSET1_NAME, HDF5_DATATYPE, file_dataspace, H5P_DEFAULT, dcpl_id,
- H5P_DEFAULT);
-
- /*
- * ------------------------------------
- * Setup selection in the dataset for
- * each MPI rank
- * ------------------------------------
- */
-
- /*
- * Each MPI rank's selection covers a
- * single chunk in the first dataset
- * dimension. Each MPI rank's selection
- * covers 4 chunks in the second dataset
- * dimension. This leads to each MPI rank
- * writing to 4 chunks of the dataset.
- */
- start[0] = mpi_rank * EXAMPLE_DSET_CHUNK_DIM_SIZE;
- start[1] = 0;
- stride[0] = 1;
- stride[1] = 1;
- count[0] = EXAMPLE_DSET_CHUNK_DIM_SIZE;
- count[1] = 4 * EXAMPLE_DSET_CHUNK_DIM_SIZE;
-
- H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, NULL);
-
- /*
- * --------------------------------------
- * Fill data buffer with MPI rank's rank
- * value to make it easy to see which
- * part of the dataset each rank wrote to
- * --------------------------------------
- */
-
- fill_databuf(start, count, stride, &data[0][0]);
-
- /*
- * ---------------------------------
- * Write to the dataset collectively
- * ---------------------------------
- */
-
- H5Dwrite(dset_id, HDF5_DATATYPE, H5S_BLOCK, file_dataspace, dxpl_id, data);
-
- /*
- * --------------
- * Close HDF5 IDs
- * --------------
- */
-
- H5Sclose(file_dataspace);
- H5Pclose(dcpl_id);
- H5Dclose(dset_id);
-}
-
-/*
- * Routine to write to a dataset in a fashion
- * where every chunk in the dataset is written
- * to by every MPI rank. This will generally
- * give the worst performance as the MPI ranks
- * will need the most amount of inter-process
- * communication.
- */
-static void
-write_dataset_overlap(hid_t file_id, hid_t dxpl_id)
-{
- C_DATATYPE *data = NULL;
- hsize_t dataset_dims[EXAMPLE_DSET_DIMS];
- hsize_t chunk_dims[EXAMPLE_DSET_DIMS];
- hsize_t start[EXAMPLE_DSET_DIMS];
- hsize_t stride[EXAMPLE_DSET_DIMS];
- hsize_t count[EXAMPLE_DSET_DIMS];
- hid_t dset_id = H5I_INVALID_HID;
- hid_t dcpl_id = H5I_INVALID_HID;
- hid_t file_dataspace = H5I_INVALID_HID;
-
- /*
- * ------------------------------------
- * Setup Dataset Creation Property List
- * ------------------------------------
- */
-
- dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
-
- /*
- * REQUIRED: Dataset chunking must be enabled to
- * apply a data filter to the dataset.
- * Chunks in the dataset are of size
- * mpi_size x EXAMPLE_DSET_CHUNK_DIM_SIZE.
- */
- chunk_dims[0] = mpi_size;
- chunk_dims[1] = EXAMPLE_DSET_CHUNK_DIM_SIZE;
- H5Pset_chunk(dcpl_id, EXAMPLE_DSET_DIMS, chunk_dims);
-
- /* Set filter to be applied to created datasets */
- set_filter(dcpl_id);
-
- /*
- * ------------------------------------
- * Define the dimensions of the dataset
- * and create it
- * ------------------------------------
- */
-
- /*
- * Create a dataset composed of N chunks,
- * where N is the number of MPI ranks. The
- * first dataset dimension scales according
- * to the number of MPI ranks. The second
- * dataset dimension stays fixed according
- * to the chunk size.
- */
- dataset_dims[0] = mpi_size * chunk_dims[0];
- dataset_dims[1] = EXAMPLE_DSET_CHUNK_DIM_SIZE;
-
- file_dataspace = H5Screate_simple(EXAMPLE_DSET_DIMS, dataset_dims, NULL);
-
- /* Create the dataset */
- dset_id = H5Dcreate2(file_id, EXAMPLE_DSET2_NAME, HDF5_DATATYPE, file_dataspace, H5P_DEFAULT, dcpl_id,
- H5P_DEFAULT);
-
- /*
- * ------------------------------------
- * Setup selection in the dataset for
- * each MPI rank
- * ------------------------------------
- */
-
- /*
- * Each MPI rank's selection covers
- * part of every chunk in the first
- * dimension. Each MPI rank's selection
- * covers all of every chunk in the
- * second dimension. This leads to
- * each MPI rank writing an equal
- * amount of data to every chunk
- * in the dataset.
- */
- start[0] = mpi_rank;
- start[1] = 0;
- stride[0] = chunk_dims[0];
- stride[1] = 1;
- count[0] = mpi_size;
- count[1] = EXAMPLE_DSET_CHUNK_DIM_SIZE;
-
- H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, NULL);
-
- /*
- * --------------------------------------
- * Fill data buffer with MPI rank's rank
- * value to make it easy to see which
- * part of the dataset each rank wrote to
- * --------------------------------------
- */
-
- data = malloc(mpi_size * EXAMPLE_DSET_CHUNK_DIM_SIZE * sizeof(C_DATATYPE));
-
- fill_databuf(start, count, stride, data);
-
- /*
- * ---------------------------------
- * Write to the dataset collectively
- * ---------------------------------
- */
-
- H5Dwrite(dset_id, HDF5_DATATYPE, H5S_BLOCK, file_dataspace, dxpl_id, data);
-
- free(data);
-
- /*
- * --------------
- * Close HDF5 IDs
- * --------------
- */
-
- H5Sclose(file_dataspace);
- H5Pclose(dcpl_id);
- H5Dclose(dset_id);
-}
-
-int
-main(int argc, char **argv)
-{
- MPI_Comm comm = MPI_COMM_WORLD;
- MPI_Info info = MPI_INFO_NULL;
- hid_t file_id = H5I_INVALID_HID;
- hid_t fapl_id = H5I_INVALID_HID;
- hid_t dxpl_id = H5I_INVALID_HID;
- char *par_prefix = NULL;
- char filename[PATH_MAX];
-
- MPI_Init(&argc, &argv);
- MPI_Comm_size(comm, &mpi_size);
- MPI_Comm_rank(comm, &mpi_rank);
-
- /*
- * ----------------------------------
- * Start parallel access to HDF5 file
- * ----------------------------------
- */
-
- /* Setup File Access Property List with parallel I/O access */
- fapl_id = H5Pcreate(H5P_FILE_ACCESS);
- H5Pset_fapl_mpio(fapl_id, comm, info);
-
- /*
- * OPTIONAL: Set collective metadata reads on FAPL to allow
- * parallel writes to filtered datasets to perform
- * better at scale. While not strictly necessary,
- * this is generally recommended.
- */
- H5Pset_all_coll_metadata_ops(fapl_id, true);
-
- /*
- * OPTIONAL: Set the latest file format version for HDF5 in
- * order to gain access to different dataset chunk
- * index types and better data encoding methods.
- * While not strictly necessary, this is generally
- * recommended.
- */
- H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
-
- /* Parse any parallel prefix and create filename */
- par_prefix = getenv("HDF5_PARAPREFIX");
-
- snprintf(filename, PATH_MAX, "%s%s%s", par_prefix ? par_prefix : "", par_prefix ? "/" : "", EXAMPLE_FILE);
-
- /* Create HDF5 file */
- file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
-
- /*
- * --------------------------------------
- * Setup Dataset Transfer Property List
- * with collective I/O
- * --------------------------------------
- */
-
- dxpl_id = H5Pcreate(H5P_DATASET_XFER);
-
- /*
- * REQUIRED: Setup collective I/O for the dataset
- * write operations. Parallel writes to
- * filtered datasets MUST be collective,
- * even if some ranks have no data to
- * contribute to the write operation.
- *
- * Refer to the 'ph5_filtered_writes_no_sel'
- * example to see how to setup a dataset
- * write when one or more MPI ranks have
- * no data to contribute to the write
- * operation.
- */
- H5Pset_dxpl_mpio(dxpl_id, H5FD_MPIO_COLLECTIVE);
-
- /*
- * --------------------------------
- * Create and write to each dataset
- * --------------------------------
- */
-
- /*
- * Write to a dataset in a fashion where no
- * chunks in the dataset are written to by
- * more than 1 MPI rank. This will generally
- * give the best performance as the MPI ranks
- * will need the least amount of inter-process
- * communication.
- */
- write_dataset_no_overlap(file_id, dxpl_id);
-
- /*
- * Write to a dataset in a fashion where
- * every chunk in the dataset is written
- * to by every MPI rank. This will generally
- * give the worst performance as the MPI ranks
- * will need the most amount of inter-process
- * communication.
- */
- write_dataset_overlap(file_id, dxpl_id);
-
- /*
- * ------------------
- * Close all HDF5 IDs
- * ------------------
- */
-
- H5Pclose(dxpl_id);
- H5Pclose(fapl_id);
- H5Fclose(file_id);
-
- printf("PHDF5 example finished with no errors\n");
-
- /*
- * ------------------------------------
- * Cleanup created HDF5 file and finish
- * ------------------------------------
- */
-
- cleanup(filename);
-
- MPI_Finalize();
-
- return 0;
-}
-
-#else
-
-int
-main(void)
-{
- printf("HDF5 not configured with parallel support or parallel filtered writes are disabled!\n");
- return 0;
-}
-
-#endif
diff --git a/examples/ph5_filtered_writes_no_sel.c b/examples/ph5_filtered_writes_no_sel.c
deleted file mode 100644
index e3ec103..0000000
--- a/examples/ph5_filtered_writes_no_sel.c
+++ /dev/null
@@ -1,369 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group. *
- * All rights reserved. *
- * *
- * This file is part of HDF5. The full HDF5 copyright notice, including *
- * terms governing use, modification, and redistribution, is contained in *
- * the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://www.hdfgroup.org/licenses. *
- * If you do not have access to either file, you may request a copy from *
- * help@hdfgroup.org. *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/*
- * Example of using the parallel HDF5 library to collectively write to
- * datasets with filters applied to them when one or more MPI ranks do not
- * have data to contribute to the dataset.
- *
- * If the HDF5_NOCLEANUP environment variable is set, the file that
- * this example creates will not be removed as the example finishes.
- *
- * A parallel file prefix is needed because, in general, the current
- * working directory in which compiling is done is not suitable for
- * parallel I/O and there is no standard pathname for parallel file
- * systems. In some cases, the parallel file name may even need a
- * parallel file type prefix such as: "pfs:/GF/...". Therefore, this
- * example parses the HDF5_PARAPREFIX environment variable for a prefix,
- * if one is needed.
- */
-
-#include <stdlib.h>
-
-#include "hdf5.h"
-
-#if defined(H5_HAVE_PARALLEL) && defined(H5_HAVE_PARALLEL_FILTERED_WRITES)
-
-#define EXAMPLE_FILE "ph5_filtered_writes_no_sel.h5"
-#define EXAMPLE_DSET_NAME "DSET"
-
-#define EXAMPLE_DSET_DIMS 2
-#define EXAMPLE_DSET_CHUNK_DIM_SIZE 10
-
-/* Dataset datatype */
-#define HDF5_DATATYPE H5T_NATIVE_INT
-typedef int C_DATATYPE;
-
-#ifndef PATH_MAX
-#define PATH_MAX 512
-#endif
-
-/* Global variables */
-int mpi_rank, mpi_size;
-
-/*
- * Routine to set an HDF5 filter on the given DCPL
- */
-static void
-set_filter(hid_t dcpl_id)
-{
- htri_t filter_avail;
-
- /*
- * Check if 'deflate' filter is available
- */
- filter_avail = H5Zfilter_avail(H5Z_FILTER_DEFLATE);
- if (filter_avail < 0)
- return;
- else if (filter_avail) {
- /*
- * Set 'deflate' filter with reasonable
- * compression level on DCPL
- */
- H5Pset_deflate(dcpl_id, 6);
- }
- else {
- /*
- * Set Fletcher32 checksum filter on DCPL
- * since it is always available in HDF5
- */
- H5Pset_fletcher32(dcpl_id);
- }
-}
-
-/*
- * Routine to fill a data buffer with data. Assumes the
- * dimension rank is 2 and the data is stored contiguously.
- */
-void
-fill_databuf(hsize_t start[], hsize_t count[], hsize_t stride[], C_DATATYPE *data)
-{
- C_DATATYPE *dataptr = data;
- hsize_t i, j;
-
- /* Use MPI rank value for data */
- for (i = 0; i < count[0]; i++) {
- for (j = 0; j < count[1]; j++) {
- *dataptr++ = mpi_rank;
- }
- }
-}
-
-/* Cleanup created file */
-static void
-cleanup(char *filename)
-{
- bool do_cleanup = getenv(HDF5_NOCLEANUP) ? false : true;
-
- if (do_cleanup)
- MPI_File_delete(filename, MPI_INFO_NULL);
-}
-
-/*
- * Routine to write to a dataset in a fashion
- * where no chunks in the dataset are written
- * to by more than 1 MPI rank and some MPI
- * ranks have no data to contribute. Ranks with
- * no data must still take part in the collective
- * write, but select nothing in the file dataspace.
- */
-static void
-write_dataset_some_no_sel(hid_t file_id, hid_t dxpl_id)
-{
- C_DATATYPE data[EXAMPLE_DSET_CHUNK_DIM_SIZE][4 * EXAMPLE_DSET_CHUNK_DIM_SIZE];
- hsize_t dataset_dims[EXAMPLE_DSET_DIMS];
- hsize_t chunk_dims[EXAMPLE_DSET_DIMS];
- hsize_t start[EXAMPLE_DSET_DIMS];
- hsize_t stride[EXAMPLE_DSET_DIMS];
- hsize_t count[EXAMPLE_DSET_DIMS];
- bool no_selection;
- hid_t dset_id = H5I_INVALID_HID;
- hid_t dcpl_id = H5I_INVALID_HID;
- hid_t file_dataspace = H5I_INVALID_HID;
-
- /*
- * ------------------------------------
- * Setup Dataset Creation Property List
- * ------------------------------------
- */
-
- dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
-
- /*
- * REQUIRED: Dataset chunking must be enabled to
- * apply a data filter to the dataset.
- * Chunks in the dataset are of size
- * EXAMPLE_DSET_CHUNK_DIM_SIZE x EXAMPLE_DSET_CHUNK_DIM_SIZE.
- */
- chunk_dims[0] = EXAMPLE_DSET_CHUNK_DIM_SIZE;
- chunk_dims[1] = EXAMPLE_DSET_CHUNK_DIM_SIZE;
- H5Pset_chunk(dcpl_id, EXAMPLE_DSET_DIMS, chunk_dims);
-
- /* Set filter to be applied to created datasets */
- set_filter(dcpl_id);
-
- /*
- * ------------------------------------
- * Define the dimensions of the dataset
- * and create it
- * ------------------------------------
- */
-
- /*
- * Create a dataset composed of 4 chunks
- * per MPI rank. The first dataset dimension
- * scales according to the number of MPI ranks.
- * The second dataset dimension stays fixed
- * according to the chunk size.
- */
- dataset_dims[0] = EXAMPLE_DSET_CHUNK_DIM_SIZE * mpi_size;
- dataset_dims[1] = 4 * EXAMPLE_DSET_CHUNK_DIM_SIZE;
-
- file_dataspace = H5Screate_simple(EXAMPLE_DSET_DIMS, dataset_dims, NULL);
-
- /* Create the dataset */
- dset_id = H5Dcreate2(file_id, EXAMPLE_DSET_NAME, HDF5_DATATYPE, file_dataspace, H5P_DEFAULT, dcpl_id,
- H5P_DEFAULT);
-
- /*
- * ------------------------------------
- * Setup selection in the dataset for
- * each MPI rank
- * ------------------------------------
- */
-
- /*
-     * MPI ranks with an odd rank value do not
- * contribute any data to the dataset.
- */
- no_selection = (mpi_rank % 2) == 1;
-
- if (no_selection) {
- /*
- * MPI ranks not contributing data to
- * the dataset should call H5Sselect_none
- * on the file dataspace that will be
- * passed to H5Dwrite.
- */
- H5Sselect_none(file_dataspace);
- }
- else {
- /*
- * Even MPI ranks contribute data to
- * the dataset. Each MPI rank's selection
- * covers a single chunk in the first dataset
- * dimension. Each MPI rank's selection
- * covers 4 chunks in the second dataset
- * dimension. This leads to each contributing
- * MPI rank writing to 4 chunks of the dataset.
- */
- start[0] = mpi_rank * EXAMPLE_DSET_CHUNK_DIM_SIZE;
- start[1] = 0;
- stride[0] = 1;
- stride[1] = 1;
- count[0] = EXAMPLE_DSET_CHUNK_DIM_SIZE;
- count[1] = 4 * EXAMPLE_DSET_CHUNK_DIM_SIZE;
-
- H5Sselect_hyperslab(file_dataspace, H5S_SELECT_SET, start, stride, count, NULL);
-
- /*
- * --------------------------------------
- * Fill data buffer with MPI rank's rank
- * value to make it easy to see which
- * part of the dataset each rank wrote to
- * --------------------------------------
- */
-
- fill_databuf(start, count, stride, &data[0][0]);
- }
-
- /*
- * ---------------------------------
- * Write to the dataset collectively
- * ---------------------------------
- */
-
- H5Dwrite(dset_id, HDF5_DATATYPE, no_selection ? H5S_ALL : H5S_BLOCK, file_dataspace, dxpl_id, data);
-
- /*
- * --------------
- * Close HDF5 IDs
- * --------------
- */
-
- H5Sclose(file_dataspace);
- H5Pclose(dcpl_id);
- H5Dclose(dset_id);
-}
-
-int
-main(int argc, char **argv)
-{
- MPI_Comm comm = MPI_COMM_WORLD;
- MPI_Info info = MPI_INFO_NULL;
- hid_t file_id = H5I_INVALID_HID;
- hid_t fapl_id = H5I_INVALID_HID;
- hid_t dxpl_id = H5I_INVALID_HID;
- char *par_prefix = NULL;
- char filename[PATH_MAX];
-
- MPI_Init(&argc, &argv);
- MPI_Comm_size(comm, &mpi_size);
- MPI_Comm_rank(comm, &mpi_rank);
-
- /*
- * ----------------------------------
- * Start parallel access to HDF5 file
- * ----------------------------------
- */
-
- /* Setup File Access Property List with parallel I/O access */
- fapl_id = H5Pcreate(H5P_FILE_ACCESS);
- H5Pset_fapl_mpio(fapl_id, comm, info);
-
- /*
- * OPTIONAL: Set collective metadata reads on FAPL to allow
- * parallel writes to filtered datasets to perform
- * better at scale. While not strictly necessary,
- * this is generally recommended.
- */
- H5Pset_all_coll_metadata_ops(fapl_id, true);
-
- /*
- * OPTIONAL: Set the latest file format version for HDF5 in
- * order to gain access to different dataset chunk
- * index types and better data encoding methods.
- * While not strictly necessary, this is generally
- * recommended.
- */
- H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);
-
- /* Parse any parallel prefix and create filename */
- par_prefix = getenv("HDF5_PARAPREFIX");
-
- snprintf(filename, PATH_MAX, "%s%s%s", par_prefix ? par_prefix : "", par_prefix ? "/" : "", EXAMPLE_FILE);
-
- /* Create HDF5 file */
- file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
-
- /*
- * --------------------------------------
- * Setup Dataset Transfer Property List
- * with collective I/O
- * --------------------------------------
- */
-
- dxpl_id = H5Pcreate(H5P_DATASET_XFER);
-
- /*
- * REQUIRED: Setup collective I/O for the dataset
- * write operations. Parallel writes to
- * filtered datasets MUST be collective,
- * even if some ranks have no data to
- * contribute to the write operation.
- */
- H5Pset_dxpl_mpio(dxpl_id, H5FD_MPIO_COLLECTIVE);
-
- /*
- * --------------------------------
- * Create and write to the dataset
- * --------------------------------
- */
-
- /*
- * Write to a dataset in a fashion where no
- * chunks in the dataset are written to by
- * more than 1 MPI rank and some MPI ranks
- * have nothing to contribute to the dataset.
- * In this case, the MPI ranks that have no
- * data to contribute must still participate
- * in the collective H5Dwrite call, but should
- * call H5Sselect_none on the file dataspace
- * passed to the H5Dwrite call.
- */
- write_dataset_some_no_sel(file_id, dxpl_id);
-
- /*
- * ------------------
- * Close all HDF5 IDs
- * ------------------
- */
-
- H5Pclose(dxpl_id);
- H5Pclose(fapl_id);
- H5Fclose(file_id);
-
- printf("PHDF5 example finished with no errors\n");
-
- /*
- * ------------------------------------
- * Cleanup created HDF5 file and finish
- * ------------------------------------
- */
-
- cleanup(filename);
-
- MPI_Finalize();
-
- return 0;
-}
-
-#else
-
-int
-main(void)
-{
- printf("HDF5 not configured with parallel support or parallel filtered writes are disabled!\n");
- return 0;
-}
-
-#endif
diff --git a/examples/ph5_subfiling.c b/examples/ph5_subfiling.c
deleted file mode 100644
index f9c3322..0000000
--- a/examples/ph5_subfiling.c
+++ /dev/null
@@ -1,551 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group. *
- * All rights reserved. *
- * *
- * This file is part of HDF5. The full HDF5 copyright notice, including *
- * terms governing use, modification, and redistribution, is contained in *
- * the COPYING file, which can be found at the root of the source code *
- * distribution tree, or in https://www.hdfgroup.org/licenses. *
- * If you do not have access to either file, you may request a copy from *
- * help@hdfgroup.org. *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-/*
- * Example of using HDF5's Subfiling VFD to write to an
- * HDF5 file that is striped across multiple subfiles
- *
- * If the HDF5_NOCLEANUP environment variable is set, the
- * files that this example creates will not be removed as
- * the example finishes.
- *
- * In general, the current working directory in which compiling
- * is done is not suitable for parallel I/O and there is no
- * standard pathname for parallel file systems. In some cases,
- * the parallel file name may even need some parallel file type
- * prefix such as: "pfs:/GF/...". Therefore, this example parses
- * the HDF5_PARAPREFIX environment variable for a prefix, if one
- * is needed.
- */
-
-#include <stdlib.h>
-
-#include "hdf5.h"
-
-#if defined(H5_HAVE_PARALLEL) && defined(H5_HAVE_SUBFILING_VFD)
-
-#define EXAMPLE_FILE "h5_subfiling_default_example.h5"
-#define EXAMPLE_FILE2 "h5_subfiling_custom_example.h5"
-#define EXAMPLE_FILE3 "h5_subfiling_precreate_example.h5"
-
-#define EXAMPLE_DSET_NAME "DSET"
-#define EXAMPLE_DSET_DIMS 2
-
-/* Have each MPI rank write 16MiB of data */
-#define EXAMPLE_DSET_NY 4194304
-
-/* Dataset datatype */
-#define EXAMPLE_DSET_DATATYPE H5T_NATIVE_INT
-typedef int EXAMPLE_DSET_C_DATATYPE;
-
-/* Cleanup created files */
-static void
-cleanup(char *filename, hid_t fapl_id)
-{
- bool do_cleanup = getenv(HDF5_NOCLEANUP) ? false : true;
-
- if (do_cleanup)
- H5Fdelete(filename, fapl_id);
-}
-
-/*
- * An example of using the HDF5 Subfiling VFD with
- * its default settings of 1 subfile per node, with
- * a stripe size of 32MiB
- */
-static void
-subfiling_write_default(hid_t fapl_id, int mpi_size, int mpi_rank)
-{
- EXAMPLE_DSET_C_DATATYPE *data;
- hsize_t dset_dims[EXAMPLE_DSET_DIMS];
- hsize_t start[EXAMPLE_DSET_DIMS];
- hsize_t count[EXAMPLE_DSET_DIMS];
- hid_t file_id;
- hid_t subfiling_fapl;
- hid_t dset_id;
- hid_t filespace;
- char filename[512];
- char *par_prefix;
-
- /*
- * Make a copy of the FAPL so we don't disturb
- * it for the other examples
- */
- subfiling_fapl = H5Pcopy(fapl_id);
-
- /*
- * Set Subfiling VFD on FAPL using default settings
- * (use IOC VFD, 1 IOC per node, 32MiB stripe size)
- *
- * Note that all of Subfiling's configuration settings
- * can be adjusted with environment variables as well
- * in this case.
- */
- H5Pset_fapl_subfiling(subfiling_fapl, NULL);
-
- /*
- * OPTIONAL: Set alignment of objects in HDF5 file to
- * be equal to the Subfiling stripe size.
- * Choosing a Subfiling stripe size and HDF5
- * object alignment value that are some
- * multiple of the disk block size can
- * generally help performance by ensuring
- * that I/O is well-aligned and doesn't
- * excessively cross stripe boundaries.
- *
- * Note that this option can substantially
- * increase the size of the resulting HDF5
- * files, so it is a good idea to keep an eye
- * on this.
- */
- H5Pset_alignment(subfiling_fapl, 0, 33554432); /* Align to default 32MiB stripe size */
-
- /* Parse any parallel prefix and create filename */
- par_prefix = getenv("HDF5_PARAPREFIX");
-
- snprintf(filename, sizeof(filename), "%s%s%s", par_prefix ? par_prefix : "", par_prefix ? "/" : "",
- EXAMPLE_FILE);
-
- /*
- * Create a new file collectively
- */
- file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, subfiling_fapl);
-
- /*
- * Create the dataspace for the dataset. The first
- * dimension varies with the number of MPI ranks
- * while the second dimension is fixed.
- */
- dset_dims[0] = mpi_size;
- dset_dims[1] = EXAMPLE_DSET_NY;
- filespace = H5Screate_simple(EXAMPLE_DSET_DIMS, dset_dims, NULL);
-
- /*
- * Create the dataset with default properties
- */
- dset_id = H5Dcreate2(file_id, EXAMPLE_DSET_NAME, EXAMPLE_DSET_DATATYPE, filespace, H5P_DEFAULT,
- H5P_DEFAULT, H5P_DEFAULT);
-
- /*
- * Each MPI rank writes from a contiguous memory
- * region to the hyperslab in the file
- */
- start[0] = mpi_rank;
- start[1] = 0;
- count[0] = 1;
- count[1] = dset_dims[1];
- H5Sselect_hyperslab(filespace, H5S_SELECT_SET, start, NULL, count, NULL);
-
- /*
- * Initialize data buffer
- */
- data = malloc(count[0] * count[1] * sizeof(EXAMPLE_DSET_C_DATATYPE));
- for (size_t i = 0; i < count[0] * count[1]; i++) {
- data[i] = mpi_rank + i;
- }
-
- /*
- * Write to dataset
- */
- H5Dwrite(dset_id, EXAMPLE_DSET_DATATYPE, H5S_BLOCK, filespace, H5P_DEFAULT, data);
-
- /*
- * Close/release resources.
- */
-
- free(data);
-
- H5Dclose(dset_id);
- H5Sclose(filespace);
- H5Fclose(file_id);
-
- cleanup(EXAMPLE_FILE, subfiling_fapl);
-
- H5Pclose(subfiling_fapl);
-}
-
-/*
- * An example of using the HDF5 Subfiling VFD with
- * custom settings
- */
-static void
-subfiling_write_custom(hid_t fapl_id, int mpi_size, int mpi_rank)
-{
- EXAMPLE_DSET_C_DATATYPE *data;
- H5FD_subfiling_config_t subf_config;
- H5FD_ioc_config_t ioc_config;
- hsize_t dset_dims[EXAMPLE_DSET_DIMS];
- hsize_t start[EXAMPLE_DSET_DIMS];
- hsize_t count[EXAMPLE_DSET_DIMS];
- hid_t file_id;
- hid_t subfiling_fapl;
- hid_t dset_id;
- hid_t filespace;
- char filename[512];
- char *par_prefix;
-
- /*
- * Make a copy of the FAPL so we don't disturb
- * it for the other examples
- */
- subfiling_fapl = H5Pcopy(fapl_id);
-
- /*
- * Get a default Subfiling and IOC configuration
- */
- H5Pget_fapl_subfiling(subfiling_fapl, &subf_config);
- H5Pget_fapl_ioc(subfiling_fapl, &ioc_config);
-
- /*
- * Set Subfiling configuration to use a 1MiB
- * stripe size and the SELECT_IOC_EVERY_NTH_RANK
- * selection method. By default, without a setting
- * in the H5FD_SUBFILING_IOC_SELECTION_CRITERIA
- * environment variable, this will use every MPI
- * rank as an I/O concentrator.
- */
- subf_config.shared_cfg.stripe_size = 1048576;
- subf_config.shared_cfg.ioc_selection = SELECT_IOC_EVERY_NTH_RANK;
-
- /*
-     * Set the IOC configuration to use 2 worker
-     * threads per IOC instead of the default
-     * setting; the updated configuration is set
-     * on the IOC FAPL below.
- */
- ioc_config.thread_pool_size = 2;
-
- /*
- * Set our new configuration on the IOC
- * FAPL used for Subfiling
- */
- H5Pset_fapl_ioc(subf_config.ioc_fapl_id, &ioc_config);
-
- /*
- * Finally, set our new Subfiling configuration
- * on the original FAPL
- */
- H5Pset_fapl_subfiling(subfiling_fapl, &subf_config);
-
- /*
- * OPTIONAL: Set alignment of objects in HDF5 file to
- * be equal to the Subfiling stripe size.
- * Choosing a Subfiling stripe size and HDF5
- * object alignment value that are some
- * multiple of the disk block size can
- * generally help performance by ensuring
- * that I/O is well-aligned and doesn't
- * excessively cross stripe boundaries.
- *
- * Note that this option can substantially
- * increase the size of the resulting HDF5
- * files, so it is a good idea to keep an eye
- * on this.
- */
- H5Pset_alignment(subfiling_fapl, 0, 1048576); /* Align to custom 1MiB stripe size */
-
- /* Parse any parallel prefix and create filename */
- par_prefix = getenv("HDF5_PARAPREFIX");
-
- snprintf(filename, sizeof(filename), "%s%s%s", par_prefix ? par_prefix : "", par_prefix ? "/" : "",
- EXAMPLE_FILE2);
-
- /*
- * Create a new file collectively
- */
- file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, subfiling_fapl);
-
- /*
- * Create the dataspace for the dataset. The first
- * dimension varies with the number of MPI ranks
- * while the second dimension is fixed.
- */
- dset_dims[0] = mpi_size;
- dset_dims[1] = EXAMPLE_DSET_NY;
- filespace = H5Screate_simple(EXAMPLE_DSET_DIMS, dset_dims, NULL);
-
- /*
- * Create the dataset with default properties
- */
- dset_id = H5Dcreate2(file_id, EXAMPLE_DSET_NAME, EXAMPLE_DSET_DATATYPE, filespace, H5P_DEFAULT,
- H5P_DEFAULT, H5P_DEFAULT);
-
- /*
- * Each MPI rank writes from a contiguous memory
- * region to the hyperslab in the file
- */
- start[0] = mpi_rank;
- start[1] = 0;
- count[0] = 1;
- count[1] = dset_dims[1];
- H5Sselect_hyperslab(filespace, H5S_SELECT_SET, start, NULL, count, NULL);
-
- /*
- * Initialize data buffer
- */
- data = malloc(count[0] * count[1] * sizeof(EXAMPLE_DSET_C_DATATYPE));
- for (size_t i = 0; i < count[0] * count[1]; i++) {
- data[i] = mpi_rank + i;
- }
-
- /*
- * Write to dataset
- */
- H5Dwrite(dset_id, EXAMPLE_DSET_DATATYPE, H5S_BLOCK, filespace, H5P_DEFAULT, data);
-
- /*
- * Close/release resources.
- */
-
- free(data);
-
- H5Dclose(dset_id);
- H5Sclose(filespace);
- H5Fclose(file_id);
-
- cleanup(EXAMPLE_FILE2, subfiling_fapl);
-
- H5Pclose(subfiling_fapl);
-}
-
-/*
- * An example of pre-creating an HDF5 file on MPI rank
- * 0 when using the HDF5 Subfiling VFD. In this case,
- * the subfiling stripe count must be set so that rank
- * 0 knows how many subfiles to pre-create.
- */
-static void
-subfiling_write_precreate(hid_t fapl_id, int mpi_size, int mpi_rank)
-{
- EXAMPLE_DSET_C_DATATYPE *data;
- H5FD_subfiling_config_t subf_config;
- hsize_t dset_dims[EXAMPLE_DSET_DIMS];
- hsize_t start[EXAMPLE_DSET_DIMS];
- hsize_t count[EXAMPLE_DSET_DIMS];
- hid_t file_id;
- hid_t subfiling_fapl;
- hid_t dset_id;
- hid_t filespace;
- char filename[512];
- char *par_prefix;
-
- /*
- * Make a copy of the FAPL so we don't disturb
- * it for the other examples
- */
- subfiling_fapl = H5Pcopy(fapl_id);
-
- /*
- * Get a default Subfiling and IOC configuration
- */
- H5Pget_fapl_subfiling(subfiling_fapl, &subf_config);
-
- /*
- * Set the Subfiling stripe count so that rank
- * 0 knows how many subfiles the logical HDF5
- * file should consist of. In this case, use
- * 5 subfiles with a default stripe size of
- * 32MiB.
- */
- subf_config.shared_cfg.stripe_count = 5;
-
- /*
- * OPTIONAL: Set alignment of objects in HDF5 file to
- * be equal to the Subfiling stripe size.
- * Choosing a Subfiling stripe size and HDF5
- * object alignment value that are some
- * multiple of the disk block size can
- * generally help performance by ensuring
- * that I/O is well-aligned and doesn't
- * excessively cross stripe boundaries.
- *
- * Note that this option can substantially
- * increase the size of the resulting HDF5
- * files, so it is a good idea to keep an eye
- * on this.
- */
-    H5Pset_alignment(subfiling_fapl, 0, 1048576); /* Align objects to 1MiB boundaries */
-
- /* Parse any parallel prefix and create filename */
- par_prefix = getenv("HDF5_PARAPREFIX");
-
- snprintf(filename, sizeof(filename), "%s%s%s", par_prefix ? par_prefix : "", par_prefix ? "/" : "",
- EXAMPLE_FILE3);
-
- /* Set dataset dimensionality */
- dset_dims[0] = mpi_size;
- dset_dims[1] = EXAMPLE_DSET_NY;
-
- if (mpi_rank == 0) {
- /*
- * Make sure only this rank opens the file
- */
- H5Pset_mpi_params(subfiling_fapl, MPI_COMM_SELF, MPI_INFO_NULL);
-
- /*
- * Set the Subfiling VFD on our FAPL using
- * our custom configuration
- */
- H5Pset_fapl_subfiling(subfiling_fapl, &subf_config);
-
- /*
- * Create a new file on rank 0
- */
- file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, subfiling_fapl);
-
- /*
- * Create the dataspace for the dataset. The first
- * dimension varies with the number of MPI ranks
- * while the second dimension is fixed.
- */
- filespace = H5Screate_simple(EXAMPLE_DSET_DIMS, dset_dims, NULL);
-
- /*
- * Create the dataset with default properties
- */
- dset_id = H5Dcreate2(file_id, EXAMPLE_DSET_NAME, EXAMPLE_DSET_DATATYPE, filespace, H5P_DEFAULT,
- H5P_DEFAULT, H5P_DEFAULT);
-
- /*
- * Initialize data buffer
- */
- data = malloc(dset_dims[0] * dset_dims[1] * sizeof(EXAMPLE_DSET_C_DATATYPE));
- for (size_t i = 0; i < dset_dims[0] * dset_dims[1]; i++) {
- data[i] = i;
- }
-
- /*
- * Rank 0 writes to the whole dataset
- */
- H5Dwrite(dset_id, EXAMPLE_DSET_DATATYPE, H5S_BLOCK, filespace, H5P_DEFAULT, data);
-
- /*
- * Close/release resources.
- */
-
- free(data);
-
- H5Dclose(dset_id);
- H5Sclose(filespace);
- H5Fclose(file_id);
- }
-
- MPI_Barrier(MPI_COMM_WORLD);
-
- /*
- * Use all MPI ranks to re-open the file and
- * read back the dataset that was created
- */
- H5Pset_mpi_params(subfiling_fapl, MPI_COMM_WORLD, MPI_INFO_NULL);
-
- /*
- * Use the same subfiling configuration as rank 0
- * used to create the file
- */
- H5Pset_fapl_subfiling(subfiling_fapl, &subf_config);
-
- /*
- * Re-open the file on all ranks
- */
- file_id = H5Fopen(filename, H5F_ACC_RDONLY, subfiling_fapl);
-
- /*
- * Open the dataset that was created
- */
- dset_id = H5Dopen2(file_id, EXAMPLE_DSET_NAME, H5P_DEFAULT);
-
- /*
- * Initialize data buffer
- */
- data = malloc(dset_dims[0] * dset_dims[1] * sizeof(EXAMPLE_DSET_C_DATATYPE));
-
- /*
- * Read the dataset on all ranks
- */
- H5Dread(dset_id, EXAMPLE_DSET_DATATYPE, H5S_BLOCK, H5S_ALL, H5P_DEFAULT, data);
-
- /*
- * Close/release resources.
- */
-
- free(data);
-
- H5Dclose(dset_id);
- H5Fclose(file_id);
-
- cleanup(EXAMPLE_FILE3, subfiling_fapl);
-
- H5Pclose(subfiling_fapl);
-}
-
-int
-main(int argc, char **argv)
-{
- MPI_Comm comm = MPI_COMM_WORLD;
- MPI_Info info = MPI_INFO_NULL;
- hid_t fapl_id;
- int mpi_size;
- int mpi_rank;
- int mpi_thread_required = MPI_THREAD_MULTIPLE;
- int mpi_thread_provided = 0;
-
- /* HDF5 Subfiling VFD requires MPI_Init_thread with MPI_THREAD_MULTIPLE */
- MPI_Init_thread(&argc, &argv, mpi_thread_required, &mpi_thread_provided);
- if (mpi_thread_provided < mpi_thread_required) {
- printf("MPI_THREAD_MULTIPLE not supported\n");
- MPI_Abort(comm, -1);
- }
-
- MPI_Comm_size(comm, &mpi_size);
- MPI_Comm_rank(comm, &mpi_rank);
-
- /*
- * Set up File Access Property List with MPI
- * parameters for the Subfiling VFD to use
- */
- fapl_id = H5Pcreate(H5P_FILE_ACCESS);
- H5Pset_mpi_params(fapl_id, comm, info);
-
- /* Use Subfiling VFD with default settings */
- subfiling_write_default(fapl_id, mpi_size, mpi_rank);
-
- /* Use Subfiling VFD with custom settings */
- subfiling_write_custom(fapl_id, mpi_size, mpi_rank);
-
- /*
- * Use Subfiling VFD to precreate the HDF5
- * file on MPI rank 0
- */
- subfiling_write_precreate(fapl_id, mpi_size, mpi_rank);
-
- H5Pclose(fapl_id);
-
- if (mpi_rank == 0)
- printf("PHDF5 example finished with no errors\n");
-
- MPI_Finalize();
-
- return 0;
-}
-
-#else
-
-/* dummy program since HDF5 lacks parallel or Subfiling VFD support */
-int
-main(void)
-{
- printf(
- "Example program cannot run - HDF5 must be built with parallel support and Subfiling VFD support\n");
- return 0;
-}
-
-#endif /* H5_HAVE_PARALLEL && H5_HAVE_SUBFILING_VFD */
diff --git a/examples/run-c-ex.sh.in b/examples/run-c-ex.sh.in
index aae1d34..b51c5d6 100644
--- a/examples/run-c-ex.sh.in
+++ b/examples/run-c-ex.sh.in
@@ -155,23 +155,7 @@ then
HDF5_DEBUG=$OLD_DEBUG_STRING &&\
rm h5_debug_trace &&\
RunTest h5_shared_mesg &&\
- rm h5_shared_mesg &&\
- RunTest h5_vds-eiger &&\
- rm h5_vds-eiger &&\
- RunTest h5_vds-exclim &&\
- rm h5_vds-exclim &&\
- RunTest h5_vds-exc &&\
- rm h5_vds-exc &&\
- RunTest h5_vds-simpleIO &&\
- rm h5_vds-simpleIO &&\
- RunTest h5_vds-percival &&\
- rm h5_vds-percival &&\
- RunTest h5_vds-percival-unlim &&\
- rm h5_vds-percival-unlim &&\
- RunTest h5_vds-percival-unlim-maxmin&&\
- rm h5_vds-percival-unlim-maxmin &&\
- RunTest h5_vds &&\
- rm h5_vds); then
+ rm h5_shared_mesg); then
EXIT_VALUE=${EXIT_SUCCESS}
else
EXIT_VALUE=${EXIT_FAILURE}